From 3e58f4e6c367ded730dd09255471b1d554a28cd6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 20 Sep 2023 06:13:06 +0000 Subject: [PATCH 001/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b3e67403aaf --- build-tools-internal/version.properties | 2 +- docs/Versions.asciidoc | 4 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 3 files changed, 75 insertions(+), 75 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 151381bfa0cb1..9fff8c63f5f56 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.8.0-snapshot-1f8e08481c2 +lucene = 9.9.0-snapshot-b3e67403aaf bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 47e9071679cc4..3f44db9928434 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.8.0 -:lucene_version_path: 9_8_0 +:lucene_version: 9.9.0 +:lucene_version_path: 9_9_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9e8c193fa705f..67d8653732d8f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 81c86035a83d23a90a6f64b200636df35ea5ec8e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 20 Sep 2023 08:30:09 -0400 Subject: [PATCH 002/263] Adds new max_inner_product vector similarity function (#99527) Adds new 
max_inner_product vector similarity function. This differs from dot_product in the following ways: Doesn't require vectors to be normalized Scales the similarity between vectors differently to prevent negative scores --- docs/changelog/99527.yaml | 5 ++ .../mapping/types/dense-vector.asciidoc | 10 ++- .../test/search.vectors/40_knn_search.yml | 85 +++++++++++++++++++ .../vectors/DenseVectorFieldMapper.java | 8 ++ 4 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/99527.yaml diff --git a/docs/changelog/99527.yaml b/docs/changelog/99527.yaml new file mode 100644 index 0000000000000..19eef621fa500 --- /dev/null +++ b/docs/changelog/99527.yaml @@ -0,0 +1,5 @@ +pr: 99445 +summary: Add new max_inner_product vector similarity function +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index fb50ee36644a6..96427a01e61d5 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -159,7 +159,7 @@ distance) between the vectors. The document `_score` is computed as `1 / (1 + l2_norm(query, vector)^2)`. `dot_product`::: -Computes the dot product of two vectors. This option provides an optimized way +Computes the dot product of two unit vectors. This option provides an optimized way to perform cosine similarity. The constraints and computed score are defined by `element_type`. + @@ -181,6 +181,14 @@ original vectors and cannot normalize them in advance. The document `_score` is computed as `(1 + cosine(query, vector)) / 2`. The `cosine` similarity does not allow vectors with zero magnitude, since cosine is not defined in this case. + +`max_inner_product`::: +Computes the maximum inner product of two vectors. This is similar to `dot_product`, +but doesn't require vectors to be normalized. This means that each vector's magnitude +can significantly effect the score. 
The document `_score` is adjusted to prevent negative +values. For `max_inner_product` values `< 0`, the `_score` is +`1 / (1 + -1 * max_inner_product(query, vector))`. For non-negative `max_inner_product` results +the `_score` is calculated `max_inner_product(query, vector) + 1`. ==== NOTE: Although they are conceptually related, the `similarity` parameter is diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index f34aef9b83321..340cd8f8d0f70 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -368,3 +368,88 @@ setup: filter: {"term": {"name": "cow.jpg"}} - length: {hits.hits: 0} +--- +"Knn search with mip": + - skip: + version: ' - 8.10.99' + reason: 'mip similarity added in 8.11' + features: close_to + + - do: + indices.create: + index: mip + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: max_inner_product + + - do: + index: + index: mip + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + + - do: + index: + index: mip + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + + - do: + index: + index: mip + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + + - length: {hits.hits: 3} + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 58694.902, error: 0.01}} + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 34702.79, error: 0.01}} + - match: 
{hits.hits.2._id: "2"} + - close_to: {hits.hits.2._score: {value: 33686.29, error: 0.01}} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: { "term": { "name": "moose.jpg" } } + + + + - length: {hits.hits: 1} + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 33686.29, error: 0.01}} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 28f83a167fda3..dc90dc7382780 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -661,6 +661,14 @@ float score(float similarity, ElementType elementType, int dim) { case FLOAT -> (1 + similarity) / 2f; }; } + }, + MAX_INNER_PRODUCT(VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT) { + @Override + float score(float similarity, ElementType elementType, int dim) { + return switch (elementType) { + case BYTE, FLOAT -> similarity < 0 ? 
1 / (1 + -1 * similarity) : similarity + 1; + }; + } }; public final VectorSimilarityFunction function; From 0433159cf1ae7509b839ae51ae1ec21d5174df0b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 21 Sep 2023 06:17:35 +0000 Subject: [PATCH 003/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-f01ff9d1f51 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9fff8c63f5f56..19cb3843a40eb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-b3e67403aaf +lucene = 9.9.0-snapshot-f01ff9d1f51 bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 67d8653732d8f..e89595774b2e1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8e24410f763188976adb9c5f3b0d9b03d32dd355 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 22 Sep 2023 06:15:53 +0000 Subject: [PATCH 004/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-be57460b060 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 19cb3843a40eb..55e98cc1482cf 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-f01ff9d1f51 +lucene = 9.9.0-snapshot-be57460b060 bundled_jdk_vendor = openjdk bundled_jdk = 20.0.2+9@6e380f22cbe7469fa75fb448bd903d8e diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index e89595774b2e1..2179faa1052c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2528,124 +2528,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b971d2cf02f4c47863a264203f3c2ef84272cd5 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Sep 2023 10:21:20 +0200 Subject: [PATCH 005/263] Fix compilation after refactoring of TermStates. 
--- .../index/mapper/extras/SourceConfirmedTextQuery.java | 2 +- .../org/elasticsearch/lucene/queries/BlendedTermQuery.java | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index 9faee0282b12c..3c6b865266e21 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -231,7 +231,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo for (Term term : terms) { TermStates ts = termStates.computeIfAbsent(term, t -> { try { - return TermStates.build(searcher.getTopReaderContext(), t, scoreMode.needsScores()); + return TermStates.build(searcher, t, scoreMode.needsScores()); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index a49f02acf4c4d..d88e0e0dd9fcf 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -73,15 +73,14 @@ public Query rewrite(IndexSearcher searcher) throws IOException { if (rewritten != this) { return rewritten; } - IndexReader reader = searcher.getIndexReader(); - IndexReaderContext context = reader.getContext(); TermStates[] ctx = new TermStates[terms.length]; int[] docFreqs = new int[ctx.length]; for (int i = 0; i < terms.length; i++) { - ctx[i] = TermStates.build(context, terms[i], true); + ctx[i] = TermStates.build(searcher, terms[i], true); docFreqs[i] = ctx[i].docFreq(); } + final IndexReader reader = 
searcher.getIndexReader(); final int maxDoc = reader.maxDoc(); blend(ctx, maxDoc, reader); return topLevelQuery(terms, ctx, docFreqs, maxDoc); From 6cf0c30b66511c18e2265277e9c7cc4a8f6a3c3f Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 22 Sep 2023 12:11:05 +0200 Subject: [PATCH 006/263] Refactor changes to IndexVersion. (#99312) This adds a version for the Lucene upgrade and adjusts some tests. --- .../main/java/org/elasticsearch/index/IndexVersion.java | 8 +++++--- .../java/org/elasticsearch/index/IndexVersionTests.java | 6 ++++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index 4afbbc851026f..5df3999a75316 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -117,14 +117,16 @@ private static IndexVersion registerIndexVersion(int id, Version luceneVersion, public static final IndexVersion V_8_9_0 = registerIndexVersion(8_09_00_99, Version.LUCENE_9_7_0, "32f6dbab-cc24-4f5b-87b5-015a848480d9"); public static final IndexVersion V_8_9_1 = registerIndexVersion(8_09_01_99, Version.LUCENE_9_7_0, "955a80ac-f70c-40a5-9399-1d8a1e5d342d"); public static final IndexVersion V_8_10_0 = registerIndexVersion(8_10_00_99, Version.LUCENE_9_7_0, "2e107286-12ad-4c51-9a6f-f8943663b6e7"); - public static final IndexVersion V_8_11_0 = registerIndexVersion(8_11_00_99, Version.LUCENE_9_8_0, "f08382c0-06ab-41f4-a56a-cf5397275627"); + public static final IndexVersion V_8_11_0 = registerIndexVersion(8_11_00_99, Version.LUCENE_9_7_0, "f08382c0-06ab-41f4-a56a-cf5397275627"); /* * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW INDEX VERSIONS * Detached index versions added below here. 
*/ - public static final IndexVersion V_8_500_000 = registerIndexVersion(8_500_000, Version.LUCENE_9_8_0, "bf656f5e-5808-4eee-bf8a-e2bf6736ff55"); - public static final IndexVersion V_8_500_001 = registerIndexVersion(8_500_001, Version.LUCENE_9_8_0, "45045a5a-fc57-4462-89f6-6bc04cda6015"); + public static final IndexVersion V_8_500_000 = registerIndexVersion(8_500_000, Version.LUCENE_9_7_0, "bf656f5e-5808-4eee-bf8a-e2bf6736ff55"); + public static final IndexVersion V_8_500_001 = registerIndexVersion(8_500_001, Version.LUCENE_9_7_0, "45045a5a-fc57-4462-89f6-6bc04cda6015"); + + public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = registerIndexVersion(8_500_010, Version.LUCENE_9_9_0, "ee5ab2e6-4d8f-11ee-be56-0242ac120002"); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index 452da5279f4c1..2fd7af9dcdd87 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -107,8 +107,10 @@ public void testDefinedConstants() throws IllegalAccessException { field.getModifiers() ); - Matcher matcher = historicalVersion.matcher(field.getName()); - if (matcher.matches()) { + Matcher matcher; + if ("UPGRADE_TO_LUCENE_9_9".equals(field.getName())) { + // OK + } else if ((matcher = historicalVersion.matcher(field.getName())).matches()) { // old-style version constant String idString = matcher.group(1) + padNumber(matcher.group(2)) + padNumber(matcher.group(3)) + "99"; assertEquals( From b066509e7ba5ae0d3c18a4d562bd9ba9caced1b8 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 23 Sep 2023 06:13:10 +0000 Subject: [PATCH 007/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-d3a3391d225 --- build-tools-internal/version.properties | 2 +- 
gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 3bfd4759b0d61..a7723a5cbce7e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-be57460b060 +lucene = 9.9.0-snapshot-d3a3391d225 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2b28b9a2a17f5..f789e526e826e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2549,124 +2549,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 82df0692857351827cfe676428c3877be33ebde0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 26 Sep 2023 06:18:18 +0000 Subject: [PATCH 008/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0fb47cd44a6 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a7723a5cbce7e..e20758406ee2f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-d3a3391d225 +lucene = 9.9.0-snapshot-0fb47cd44a6 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f789e526e826e..641ac9100dd01 100644 --- 
a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2549,124 +2549,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 92deac7cd7b7c7b009cb036dc0c494be0b4e4ef5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 27 Sep 2023 06:15:08 +0000 Subject: [PATCH 009/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0fb47cd44a6 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 641ac9100dd01..e31c4d1a47429 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2551,122 +2551,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From c56896fefbc3b7016867eaa2fb46c5816b8e1c1b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 27 Sep 2023 14:24:13 +0000 Subject: [PATCH 010/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-350de210c36 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e20758406ee2f..ec90ba3c4721f 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-0fb47cd44a6 +lucene = 9.9.0-snapshot-350de210c36 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 186f1ddf50c8d..040597c95d2a9 
100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 3b5131ec96c1a087a580813e2532fecb04be7f1e Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 27 Sep 2023 20:04:48 +0200 Subject: [PATCH 011/263] Add RandomAccessInput#length to SeekTrackingDirectoryWrapper (#99962) Fix compiling error after adding a new method to RandomAccessInput interface. --- .../test/seektracker/SeekTrackingDirectoryWrapper.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java index 9b1991b52e500..9b3d31022c589 100644 --- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java +++ b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java @@ -143,6 +143,11 @@ public RandomAccessInput randomAccessSlice(long offset, long length) throws IOEx IndexInput slice = wrapIndexInput(directory, name, innerSlice); // return default impl return new RandomAccessInput() { + @Override + public long length() { + return slice.length(); + } + @Override public byte readByte(long pos) throws IOException { slice.seek(pos); From dc511398d4860d83f6490ceddf245e8b59623d7a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 28 Sep 2023 06:17:09 +0000 Subject: [PATCH 012/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-350de210c36 --- 
gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 040597c95d2a9..1d054b584ff29 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2581,122 +2581,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 8209aa75fd56faf6594cfcf934855ceabf60c988 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 29 Sep 2023 06:15:31 +0000 Subject: [PATCH 013/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-9ba7f2dc4bc --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ec90ba3c4721f..b84f6f9697844 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-350de210c36 +lucene = 9.9.0-snapshot-9ba7f2dc4bc bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1d054b584ff29..311139031f7b4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0679604fa89521bec7bb7d9f45af5104cc43111c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 30 Sep 2023 06:13:58 +0000 Subject: [PATCH 014/263] [Automated] Update Lucene snapshot to 
9.9.0-snapshot-7c1d1147beb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b84f6f9697844..e3b93ad792a2d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-9ba7f2dc4bc +lucene = 9.9.0-snapshot-7c1d1147beb bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 311139031f7b4..8f5fdddb6a3eb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b23b7006bd584286810670f8f213da8c26a44e0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 1 Oct 2023 06:15:57 +0000 Subject: [PATCH 015/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-7c1d1147beb --- gradle/verification-metadata.xml | 58 ++++++++++++++++---------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3900d9a6db45a..a3d37e8576bcb 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -69,11 +69,11 @@ - - - - - + + + + + @@ -2581,122 +2581,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9797c08ef0168f1df896ebcd46f268c63c83a130 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 2 Oct 2023 06:16:07 +0000 Subject: [PATCH 
016/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-bab19260197 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e3b93ad792a2d..95ee0b579ad51 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-7c1d1147beb +lucene = 9.9.0-snapshot-bab19260197 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a3d37e8576bcb..782a137a75b86 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 39a3fbdb8da2f6e3bf7b758b759b267abc3940e4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 3 Oct 2023 06:16:14 +0000 Subject: [PATCH 017/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-8c994d1e7c4 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 95ee0b579ad51..4b547324a1c0d 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-bab19260197 +lucene = 9.9.0-snapshot-8c994d1e7c4 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff 
--git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 782a137a75b86..1b28da4c9bb8f 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a3e1d3e8e50e2f6b8914c0f6388a2aec911d5344 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 5 Oct 2023 06:24:38 +0000 Subject: [PATCH 018/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-cccaa7e7298 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4b547324a1c0d..9e73b184d1f8e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.11.0 -lucene = 9.9.0-snapshot-8c994d1e7c4 +lucene = 9.9.0-snapshot-cccaa7e7298 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1b28da4c9bb8f..1ae54ec3876f1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2579,124 +2579,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b001ea9de49feda0d2dc4d7a810f901ba58d0b0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 6 Oct 2023 06:26:48 +0000 Subject: [PATCH 019/263] 
[Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 149 ++++++++++++------------ 2 files changed, 73 insertions(+), 78 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6efa3d17d0274..0de3dc693e095 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-cccaa7e7298 +lucene = 9.9.0-snapshot-b85aeb3a4fa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 56624b1e66354..0cac159905c9a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -36,11 +36,6 @@ - - - - - @@ -2589,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7e1ca81139586b54586691e9e7a921ce5509fc18 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 7 Oct 2023 06:20:32 +0000 Subject: [PATCH 020/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0cac159905c9a..d8bd1f64b29f1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2586,122 +2586,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 9740dd55543132874dbb5f0c2847ba64defb215e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 8 Oct 2023 
06:24:23 +0000 Subject: [PATCH 021/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b85aeb3a4fa --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d8bd1f64b29f1..7c38a4d786b87 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2586,122 +2586,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 54bc0914f4298962347d367f32c040c2f70eb732 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 10 Oct 2023 06:09:46 +0000 Subject: [PATCH 022/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-455d4152d31 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 0de3dc693e095..d9974fd10cec2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-b85aeb3a4fa +lucene = 9.9.0-snapshot-455d4152d31 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 7c38a4d786b87..a3b35899da816 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e6999042e219508599fc165c1492028ff133ac05 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 11 Oct 
2023 06:09:49 +0000 Subject: [PATCH 023/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-823af4931aa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d9974fd10cec2..56353b09ca80c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-455d4152d31 +lucene = 9.9.0-snapshot-823af4931aa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a3b35899da816..5ef5243db8384 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From dce780a61be2c5608c393b9676a8e0dd4e2fd762 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 12 Oct 2023 06:09:22 +0000 Subject: [PATCH 024/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-aa968f96d6c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 56353b09ca80c..7e57fccd7d63c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-823af4931aa +lucene = 9.9.0-snapshot-aa968f96d6c bundled_jdk_vendor = openjdk bundled_jdk = 
21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5ef5243db8384..9333eadbb5cc2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2584,124 +2584,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0c32da2718f870e9788e468585c84e655c1de17e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 13 Oct 2023 06:09:41 +0000 Subject: [PATCH 025/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-4533dcea4ec --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7e57fccd7d63c..7b0866afbfd12 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-aa968f96d6c +lucene = 9.9.0-snapshot-4533dcea4ec bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2b18172c6c7d5..2fee33c5390cf 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From dae5f7a46bf6ca5cf929fa40e709446fa6b38ae6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 14 
Oct 2023 06:13:34 +0000 Subject: [PATCH 026/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a5f94b1e81e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7b0866afbfd12..eaedb3e67defb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-4533dcea4ec +lucene = 9.9.0-snapshot-a5f94b1e81e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2fee33c5390cf..5f1caba9e9dda 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From ebb5d9eb9526171820ec17c6cccff6d70407d193 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 15 Oct 2023 06:09:27 +0000 Subject: [PATCH 027/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a1bb48aa426 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index eaedb3e67defb..9ea4b7cce9ebb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a5f94b1e81e +lucene = 9.9.0-snapshot-a1bb48aa426 bundled_jdk_vendor = openjdk bundled_jdk = 
21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f1caba9e9dda..a0d2b10515821 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 85e8f65c4e30ed239dc09c4935d413567d5fc1b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 16 Oct 2023 06:09:30 +0000 Subject: [PATCH 028/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-28255de5bee --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9ea4b7cce9ebb..ef067340843d4 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a1bb48aa426 +lucene = 9.9.0-snapshot-28255de5bee bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a0d2b10515821..859a1285426a3 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 45a4c1c98c91cf97674bf7d612df96ba14b7c88a Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 16 Oct 2023 
15:31:09 +0100 Subject: [PATCH 029/263] Fix compilation (#100903) --- server/src/main/java/org/elasticsearch/index/IndexVersion.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index e15bb414aca7a..df4fe4b85b3a8 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -128,7 +128,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); - public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = registerIndexVersion(8_500_010, Version.LUCENE_9_9_0, "ee5ab2e6-4d8f-11ee-be56-0242ac120002"); + public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = def(8_500_010, Version.LUCENE_9_9_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ From 5f9516868b0019820d52be73d0d55ac8f52b7181 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 17 Oct 2023 06:08:58 +0000 Subject: [PATCH 030/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ba26abcaee9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ef067340843d4..4c0b643da3eb7 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-28255de5bee +lucene = 9.9.0-snapshot-ba26abcaee9 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 859a1285426a3..4de38f2157c9e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From be758b8e7e2d96885e5f70e8207c73e77ec8feb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 18 Oct 2023 06:09:24 +0000 Subject: [PATCH 031/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-18bb826564b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4c0b643da3eb7..e51f4c3ae93c4 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ba26abcaee9 +lucene = 9.9.0-snapshot-18bb826564b bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4de38f2157c9e..92347e2e0ed36 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c1eae02e377d205933dcbc7bc70e59af535d2cc5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 19 Oct 2023 06:10:28 +0000 Subject: [PATCH 032/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-40fbff02f1e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e51f4c3ae93c4..9641d5aec7aa0 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-18bb826564b +lucene = 9.9.0-snapshot-40fbff02f1e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 92347e2e0ed36..24b0b31540ab7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7d4bcd89b9e1f29470de0c408e5f53ab0a904384 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 20 Oct 2023 06:10:09 +0000 Subject: [PATCH 033/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6fc3483e4fa --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9641d5aec7aa0..01cea3178f44c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-40fbff02f1e +lucene = 9.9.0-snapshot-6fc3483e4fa bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 24b0b31540ab7..ca0cf4849a4c5 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From bb83b44e7028e941d1343c75aa5a8e320b689f71 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 21 Oct 2023 06:09:33 +0000 Subject: [PATCH 034/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-07a76555d9e --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 01cea3178f44c..6b0c981726d36 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6fc3483e4fa +lucene = 9.9.0-snapshot-07a76555d9e bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ca0cf4849a4c5..05459c59da2b9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b8723af1f26a08bb93323623f8a40445371363f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 22 Oct 2023 06:09:31 +0000 Subject: [PATCH 035/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-3292aca1f45 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 6b0c981726d36..51cb9246b7d3a 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-07a76555d9e +lucene = 9.9.0-snapshot-3292aca1f45 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 05459c59da2b9..548c66a504dd4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d011f8b59227de3e0a6693c628880245a028ba9e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 23 Oct 2023 06:09:13 +0000 Subject: [PATCH 036/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ad0f00a6cb2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 51cb9246b7d3a..d7b23f6fe32b5 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-3292aca1f45 +lucene = 9.9.0-snapshot-ad0f00a6cb2 bundled_jdk_vendor = openjdk bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 548c66a504dd4..b8c77dc140ef2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e43dfadfb73161ed6b843a8e9a67de9d7ce0c065 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 25 Oct 2023 06:08:39 +0000 Subject: [PATCH 037/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-1cb1a14cc84 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 23c7f5a2dbf75..e9b28221c279a 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ad0f00a6cb2 +lucene = 9.9.0-snapshot-1cb1a14cc84 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index bf61379907588..cffe5644489e2 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 16ac279abad98e216183dc01d72aa237c3854fc6 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 26 Oct 2023 06:08:31 +0000 Subject: [PATCH 038/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-170f594daea --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e9b28221c279a..65a590d0db701 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-1cb1a14cc84 +lucene = 9.9.0-snapshot-170f594daea bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cffe5644489e2..61a112ce064c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 9d51f8b4629e06d7af8571e4ff7504e96894c803 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 11:37:28 +0200 Subject: [PATCH 039/263] Update references to Lucene95Codec This is needed following https://github.com/apache/lucene/pull/12685 and https://github.com/apache/lucene/pull/12582 --- .../elasticsearch/common/lucene/Lucene.java | 2 +- .../index/codec/CodecService.java | 10 ++++---- .../index/codec/PerFieldMapperCodec.java | 4 ++-- .../vectors/DenseVectorFieldMapper.java | 6 +++-- .../IndexDiskUsageAnalyzerTests.java | 24 +++++++++---------- .../elasticsearch/index/codec/CodecTests.java | 12 +++++----- .../index/codec/PerFieldMapperCodecTests.java | 6 ++--- .../engine/CompletionStatsCacheTests.java | 4 ++-- .../vectors/DenseVectorFieldMapperTests.java | 7 +++--- .../index/mapper/MapperServiceTestCase.java | 4 ++-- .../sourceonly/SourceOnlySnapshot.java | 2 ++ 11 files changed, 43 insertions(+), 38 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index a53df0087b251..31a4ca97aad6a 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -87,7 +87,7 @@ import java.util.Objects; public class Lucene { - public static final String LATEST_CODEC = "Lucene95"; + public static final String LATEST_CODEC = "Lucene99"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 990d44f5baefc..d4771ba74e0fb 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ 
b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService; @@ -35,11 +35,11 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene95Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene95Codec(Lucene95Codec.Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene99Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION)); } else { - codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, bigArrays)); - codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); + codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, bigArrays)); + codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays)); } codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); for (String codec : Codec.availableCodecs()) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index df1aca3dc7b53..b406262fac3dc 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import 
org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexMode; @@ -37,7 +37,7 @@ * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. */ -public class PerFieldMapperCodec extends Lucene95Codec { +public class PerFieldMapperCodec extends Lucene99Codec { private final MapperService mapperService; private final DocValuesFormat docValuesFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 6aaea1dd32285..c6098b1884a73 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -11,7 +11,8 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -1086,7 +1087,8 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene95HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); + 
format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction, + new Lucene99ScalarQuantizedVectorsFormat()); } // It's legal to reuse the same format name as this is the same on-disk format. return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index fec7a86bd3e59..57dbb1e73f7c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -13,8 +13,8 @@ import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; @@ -263,7 +263,7 @@ public void testKnnVectors() throws Exception { logger.info("--> stats {}", stats); long dataBytes = (long) numDocs * dimension * Float.BYTES; // size of flat vector data - long indexBytesEstimate = (long) numDocs * (Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN / 4); // rough size of HNSW graph + long indexBytesEstimate = (long) numDocs * (Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN / 4); // rough size of HNSW graph assertThat("numDocs=" + numDocs + ";dimension=" + dimension, stats.total().getKnnVectorsBytes(), greaterThan(dataBytes)); long connectionOverhead = 
stats.total().getKnnVectorsBytes() - dataBytes; assertThat("numDocs=" + numDocs, connectionOverhead, greaterThan(indexBytesEstimate)); @@ -326,7 +326,7 @@ public void testTriangle() throws Exception { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene95Codec(Lucene95Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene99Codec(Lucene99Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -413,25 +413,25 @@ private static void addFieldsToDoc(Document doc, IndexableField[] fields) { enum CodecMode { BEST_SPEED { @Override - Lucene95Codec.Mode mode() { - return Lucene95Codec.Mode.BEST_SPEED; + Lucene99Codec.Mode mode() { + return Lucene99Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene95Codec.Mode mode() { - return Lucene95Codec.Mode.BEST_COMPRESSION; + Lucene99Codec.Mode mode() { + return Lucene99Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene95Codec.Mode mode(); + abstract Lucene99Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene95Codec(codecMode.mode())); + .setCodec(new Lucene99Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -639,7 +639,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene95Codec(mode.mode()) { + .setCodec(new Lucene99Codec(mode.mode()) { 
@Override public PostingsFormat getPostingsFormatForField(String field) { return new Lucene90PostingsFormat(); @@ -652,7 +652,7 @@ public DocValuesFormat getDocValuesFormatForField(String field) { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { - return new Lucene95HnswVectorsFormat(); + return new Lucene99HnswVectorsFormat(); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index b7a5b665ce58f..625c536a1c0d5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -10,7 +10,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -44,21 +44,21 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene95Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + 
assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene99Codec.Mode expected, Codec actual) throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(null); iwc.setCodec(actual); @@ -70,7 +70,7 @@ private void assertStoredFieldsCompressionEquals(Lucene95Codec.Mode expected, Co SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene95Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene99Codec.Mode.valueOf(v)); ir.close(); dir.close(); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index adb6ef77f2873..e2a2c72d3eae3 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -168,7 +168,7 @@ private PerFieldMapperCodec createCodec(boolean timestampField, boolean timeSeri """; mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); } - return new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new 
PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); } public void testUseES87TSDBEncodingSettingDisabled() throws IOException { @@ -207,7 +207,7 @@ private PerFieldMapperCodec createCodec(boolean enableES87TSDBCodec, boolean tim settings.put(IndexSettings.TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING.getKey(), enableES87TSDBCodec); MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), settings.build(), "test"); mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); - return new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); + return new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 2a72b1fe40ec6..96c38efed5b53 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -44,7 +44,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion90PostingsFormat(); - indexWriterConfig.setCodec(new Lucene95Codec() { + indexWriterConfig.setCodec(new Lucene99Codec() { @Override public 
PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index b10d756a6e458..d61960cfc0f51 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -53,8 +53,8 @@ import java.util.List; import java.util.Set; -import static org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH; -import static org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -973,10 +973,11 @@ public void testKnnVectorsFormat() throws IOException { Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); - String expectedString = "Lucene95HnswVectorsFormat(name=Lucene95HnswVectorsFormat, maxConn=" + String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + m + ", beamWidth=" + efConstruction + + ", quantizer=Lucene99ScalarQuantizedVectorsFormat(name=Lucene99ScalarQuantizedVectorsFormat, quantile=null)" + ")"; assertEquals(expectedString, knnVectorsFormat.toString()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 168ab8663a153..a0c6d34fc1f6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -10,7 +10,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; @@ -245,7 +245,7 @@ protected static void withLuceneIndex( CheckedConsumer test ) throws IOException { IndexWriterConfig iwc = new IndexWriterConfig(IndexShard.buildIndexAnalyzer(mapperService)).setCodec( - new PerFieldMapperCodec(Lucene95Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) + new PerFieldMapperCodec(Lucene99Codec.Mode.BEST_SPEED, mapperService, BigArrays.NON_RECYCLING_INSTANCE) ); try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc)) { builder.accept(iw); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 50485ecc21d9a..4a6f6951ec4b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Sort; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import 
org.apache.lucene.store.FilterDirectory; @@ -234,6 +235,7 @@ private SegmentCommitInfo syncSegment( si.name, si.maxDoc(), false, + si.getHasBlocks(), si.getCodec(), si.getDiagnostics(), si.getId(), From 6fd0776b43f416c2d71c77be537c24292dbe00a8 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 11:42:40 +0200 Subject: [PATCH 040/263] Fix spotless issues --- .../index/mapper/vectors/DenseVectorFieldMapper.java | 7 +++++-- .../snapshots/sourceonly/SourceOnlySnapshot.java | 1 - 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index c6098b1884a73..5e89a25fe2eb2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1087,8 +1087,11 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction, - new Lucene99ScalarQuantizedVectorsFormat()); + format = new Lucene99HnswVectorsFormat( + hnswIndexOptions.m, + hnswIndexOptions.efConstruction, + new Lucene99ScalarQuantizedVectorsFormat() + ); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 4a6f6951ec4b2..c332694d93975 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Sort; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; From 156063945632b591d99f370e647ce414f907c5d5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 12:23:37 +0200 Subject: [PATCH 041/263] Address compile error in BWCCodec --- .../java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 5d834e0303a37..714f8be73c135 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -129,6 +129,7 @@ public static SegmentInfo wrap(SegmentInfo segmentInfo) { org.apache.lucene.util.Version.LATEST, segmentInfo.name, segmentInfo.maxDoc(), + segmentInfo.getHasBlocks(), segmentInfo.getUseCompoundFile(), codec, segmentInfo.getDiagnostics(), From 2d061460ab272108a50b1fcb4a0a90238c937f5d Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 26 Oct 2023 15:45:07 +0200 Subject: [PATCH 042/263] 
Fix compile errors on Lucene62SegmentInfoFormat and Lucene50SegmentInfoFormat These are needed after https://github.com/apache/lucene/pull/12685 --- .../lucene50/Lucene50SegmentInfoFormat.java | 15 ++++++++++++++- .../lucene62/Lucene62SegmentInfoFormat.java | 1 + 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java index cf4437a230c0d..a260722ee3501 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50SegmentInfoFormat.java @@ -70,7 +70,20 @@ public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOConte final Set files = input.readSetOfStrings(); final Map attributes = input.readMapOfStrings(); - si = new SegmentInfo(dir, version, null, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, null); + si = new SegmentInfo( + dir, + version, + null, + segment, + docCount, + isCompoundFile, + false, + null, + diagnostics, + segmentID, + attributes, + null + ); si.setFiles(files); } catch (Throwable exception) { priorE = exception; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java index b700c39591819..5416b1a9fbc5a 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62SegmentInfoFormat.java @@ -210,6 +210,7 @@ public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOConte segment, docCount, isCompoundFile, + false, null, diagnostics, segmentID, From 8cf48efe1256e9eb27245d2d1514063c13d427f3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 27 Oct 2023 06:09:07 +0000 Subject: [PATCH 043/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-2bb54320c33 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 65a590d0db701..a63cc5ff15927 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-170f594daea +lucene = 9.9.0-snapshot-2bb54320c33 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 61a112ce064c4..5da643e984688 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 5039406b9cfc730f3755b746a9c7974c9a4e18bf Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 27 Oct 2023 14:52:31 +0200 Subject: [PATCH 044/263] Fix arguments order in BWCCodec#wrap --- .../org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 714f8be73c135..df6fded49e6bb 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -129,8 +129,8 @@ public static SegmentInfo wrap(SegmentInfo segmentInfo) { org.apache.lucene.util.Version.LATEST, segmentInfo.name, segmentInfo.maxDoc(), - segmentInfo.getHasBlocks(), segmentInfo.getUseCompoundFile(), + segmentInfo.getHasBlocks(), codec, segmentInfo.getDiagnostics(), segmentInfo.getId(), From 21de5fe18f222321c616b5c56a19d8e6706319b5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 28 Oct 2023 06:08:58 +0000 Subject: [PATCH 045/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-063cfa7a85c --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a63cc5ff15927..65aa0da7db3ea 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-2bb54320c33 +lucene = 9.9.0-snapshot-063cfa7a85c bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5da643e984688..45e4a03680939 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 6b01689068316682670ff04db797cb243fc9a4a9 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 29 Oct 2023 06:09:16 +0000 Subject: [PATCH 046/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-5fe48424a25 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 65aa0da7db3ea..1154110164e6e 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-063cfa7a85c +lucene = 9.9.0-snapshot-5fe48424a25 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 45e4a03680939..4c7d9f7fbc717 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0bd8b91a211105f9521ea63b422fd8071a90bf32 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 31 Oct 2023 06:09:05 +0000 Subject: [PATCH 047/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-5b26498ec72 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1154110164e6e..1fdb01602227d 100644 --- 
a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-5fe48424a25 +lucene = 9.9.0-snapshot-5b26498ec72 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 4c7d9f7fbc717..754fcabf059a7 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e9776b20349800a245595628fbc831ba685f32d5 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 31 Oct 2023 11:44:42 +0100 Subject: [PATCH 048/263] Switch ContextIndexSearcher to use Lucene's TaskExecutor (#101537) We have contributed back to Lucene the changes we had made around running concurrent tasks. These include waiting for all tasks to finish when an exception is thrown, as well as not starting tasks when one of the previously run tasks throws an exception. The execution of concurrent tasks is now generalized within Lucene and exposed through a TaskExecutor that can be retrieved from the IndexSearcher and used to run tasks. We still have customizations that require us to override some of the search method, but with this change we rely on standard Lucene's behaviour for running concurrent tasks. 
--- .../search/internal/ContextIndexSearcher.java | 116 +----- .../internal/ContextIndexSearcherTests.java | 334 ------------------ 2 files changed, 9 insertions(+), 441 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 3c69db98c7588..3eac5b5378bdd 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -8,8 +8,6 @@ package org.elasticsearch.search.internal; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -36,9 +34,7 @@ import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.Bits; import org.apache.lucene.util.SparseFixedBitSet; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.lucene.util.CombinedBitSet; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -58,21 +54,14 @@ import java.util.Objects; import java.util.PriorityQueue; import java.util.Set; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.Callable; import java.util.concurrent.Executor; -import java.util.concurrent.Future; -import java.util.concurrent.FutureTask; -import java.util.concurrent.RunnableFuture; -import java.util.concurrent.atomic.AtomicInteger; /** * Context-aware extension of {@link IndexSearcher}. 
*/ public class ContextIndexSearcher extends IndexSearcher implements Releasable { - private static final Logger logger = LogManager.getLogger(ContextIndexSearcher.class); - /** * The interval at which we check for search cancellation when we cannot use * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}. @@ -143,7 +132,6 @@ public ContextIndexSearcher( int maximumNumberOfSlices, int minimumDocsPerSlice ) throws IOException { - // we need to pass the executor up so it can potentially be used by query rewrite, which does not rely on slicing super(wrapWithExitableDirectoryReader ? new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); setSimilarity(similarity); setQueryCache(queryCache); @@ -324,22 +312,12 @@ public T search(Query query, CollectorManager col /** * Similar to the lucene implementation, with the following changes made: - * 1) it will wait for all threads to finish before returning when an exception is thrown. In that case, subsequent exceptions will be - * ignored and the first exception is re-thrown after all tasks are completed. - * 2) Tasks are cancelled on exception, as well as on timeout, to prevent needless computation - * 3) collection is unconditionally offloaded to the executor when set, even when there is a single slice or the request does not - * support concurrent collection. The executor is not set only when concurrent search has been explicitly disabled at the cluster level. - * 4) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads + * 1) postCollection is performed after each segment is collected. This is needed for aggregations, performed by search worker threads * so it can be parallelized. Also, it needs to happen in the same thread where doc_values are read, as it consumes them and Lucene * does not allow consuming them from a different thread. 
- * 5) handles the ES TimeExceededException + * 2) handles the ES TimeExceededException * */ private T search(Weight weight, CollectorManager collectorManager, C firstCollector) throws IOException { - // the executor will be null only when concurrency is disabled at the cluster level - if (getExecutor() == null) { - search(leafContexts, weight, firstCollector); - return collectorManager.reduce(Collections.singletonList(firstCollector)); - } LeafSlice[] leafSlices = getSlices(); if (leafSlices.length == 0) { assert leafContexts.isEmpty(); @@ -356,92 +334,16 @@ private T search(Weight weight, CollectorManager throw new IllegalStateException("CollectorManager does not always produce collectors with the same score mode"); } } - final List> listTasks = new ArrayList<>(); + final List> listTasks = new ArrayList<>(); for (int i = 0; i < leafSlices.length; ++i) { final LeafReaderContext[] leaves = leafSlices[i].leaves; final C collector = collectors.get(i); - AtomicInteger state = new AtomicInteger(0); - RunnableFuture task = new FutureTask<>(() -> { - if (state.compareAndSet(0, 1)) { - // A slice throws exception or times out: cancel all the tasks, to prevent slices that haven't started yet from - // starting and performing needless computation. - // TODO we will also want to cancel tasks that have already started, reusing the timeout mechanism - try { - search(Arrays.asList(leaves), weight, collector); - if (timeExceeded) { - for (Future future : listTasks) { - FutureUtils.cancel(future); - } - } - } catch (Exception e) { - for (Future future : listTasks) { - FutureUtils.cancel(future); - } - throw e; - } - return collector; - } - throw new CancellationException(); - }) { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - /* - Future#get (called down below after submitting all tasks) throws CancellationException for a cancelled task while - it is still running. It's important to make sure that search does not leave any tasks behind when it returns. 
- Overriding cancel ensures that tasks that are already started are left alone once cancelled, so Future#get will - wait for them to finish instead of throwing CancellationException. - Tasks that are cancelled before they are started won't start (same behaviour as the original implementation). - */ - return state.compareAndSet(0, -1); - } - - @Override - public boolean isCancelled() { - return state.get() == -1; - } - }; - listTasks.add(task); - } - logger.trace("Collecting using " + listTasks.size() + " tasks."); - - for (Runnable task : listTasks) { - getExecutor().execute(task); - } - RuntimeException exception = null; - final List collectedCollectors = new ArrayList<>(); - boolean cancellation = false; - for (Future future : listTasks) { - try { - collectedCollectors.add(future.get()); - } catch (InterruptedException e) { - if (exception == null) { - exception = new ThreadInterruptedException(e); - } else { - // we ignore further exceptions - } - } catch (ExecutionException e) { - if (exception == null) { - if (e.getCause() instanceof CancellationException) { - // thrown by the manual cancellation implemented above - we cancel on exception and we will throw the root cause - cancellation = true; - } else { - if (e.getCause() instanceof RuntimeException runtimeException) { - exception = runtimeException; - } else if (e.getCause() instanceof IOException ioException) { - throw ioException; - } else { - exception = new RuntimeException(e.getCause()); - } - } - } else { - // we ignore further exceptions - } - } - } - assert cancellation == false || exception != null || timeExceeded : "cancellation without an exception or timeout?"; - if (exception != null) { - throw exception; + listTasks.add(() -> { + search(Arrays.asList(leaves), weight, collector); + return collector; + }); } + List collectedCollectors = getTaskExecutor().invokeAll(listTasks); return collectorManager.reduce(collectedCollectors); } } diff --git 
a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 9e6b6330d2f23..a4e52af5f43c2 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -45,7 +45,6 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -58,13 +57,10 @@ import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.SparseFixedBitSet; -import org.apache.lucene.util.ThreadInterruptedException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; @@ -81,17 +77,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Set; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executor; import java.util.concurrent.Executors; -import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.atomic.AtomicInteger; import 
static org.elasticsearch.search.internal.ContextIndexSearcher.intersectScorerAndBitSet; import static org.elasticsearch.search.internal.ExitableDirectoryReader.ExitableLeafReader; @@ -525,330 +515,6 @@ public boolean isCacheable(LeafReaderContext ctx) { } } - /** - * Simulate one or more exceptions being thrown while collecting, through a custom query that throws IOException in its Weight#scorer. - * Verify that the slices that had to wait because there were no available threads in the pool are not started following the exception, - * which triggers a cancellation of all the tasks that are part of the running search. - * Simulate having N threads busy doing other work (e.g. other searches) otherwise all slices can be executed directly, given that - * the number of slices is dependent on the max pool size. - */ - public void testCancelSliceTasksOnException() throws Exception { - try (Directory dir = newDirectory()) { - indexDocs(dir); - int numThreads = randomIntBetween(4, 6); - int numBusyThreads = randomIntBetween(0, 3); - int numAvailableThreads = numThreads - numBusyThreads; - ThreadPoolExecutor executor = EsExecutors.newFixed( - ContextIndexSearcherTests.class.getName(), - numThreads, - -1, - EsExecutors.daemonThreadFactory(""), - new ThreadContext(Settings.EMPTY), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - ExecutorTestWrapper executorTestWrapper = new ExecutorTestWrapper(executor, numBusyThreads); - try (DirectoryReader directoryReader = DirectoryReader.open(dir)) { - Set throwingLeaves = new HashSet<>(); - Set scoredLeaves = new CopyOnWriteArraySet<>(); - final int[] newCollectorsCalls; - final boolean[] reduceCalled; - LeafSlice[] leafSlices; - try ( - ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher( - directoryReader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), - true, - executorTestWrapper, - executor.getMaximumPoolSize(), - 1 - ) - ) { - 
leafSlices = contextIndexSearcher.getSlices(); - int numThrowingLeafSlices = randomIntBetween(1, 3); - for (int i = 0; i < numThrowingLeafSlices; i++) { - LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; - throwingLeaves.add(randomFrom(throwingLeafSlice.leaves)); - } - Query query = new TestQuery() { - @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { - return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - if (throwingLeaves.contains(context)) { - // a random segment of some random slices throws exception. Other slices may or may not have started - throw new IOException(); - } - scoredLeaves.add(context); - return new ConstantScoreScorer( - this, - boost, - ScoreMode.COMPLETE, - DocIdSetIterator.all(context.reader().maxDoc()) - ); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - }; - newCollectorsCalls = new int[] { 0 }; - reduceCalled = new boolean[] { false }; - CollectorManager collectorManager = new CollectorManager<>() { - @Override - public Collector newCollector() { - newCollectorsCalls[0]++; - return new SimpleCollector() { - @Override - public void collect(int doc) { - - } - - @Override - public ScoreMode scoreMode() { - return ScoreMode.COMPLETE; - } - }; - } - - @Override - public Integer reduce(Collection collectors) { - reduceCalled[0] = true; - return null; - } - }; - expectThrows(IOException.class, () -> contextIndexSearcher.search(query, collectorManager)); - assertBusy(() -> { - // active count is approximate, wait until it converges to the expected number - if (executor.getActiveCount() > numBusyThreads) { - throw new AssertionError("no search tasks should be left running"); - } - }); - } - // as many tasks as slices have been created - assertEquals(leafSlices.length, newCollectorsCalls[0]); - // 
unexpected exception thrown, reduce is not called, there are no results to return - assertFalse(reduceCalled[0]); - Set expectedScoredLeaves = new HashSet<>(); - // the first N slices, where N is the number of available permits, will run straight-away, the others will be cancelled - for (int i = 0; i < leafSlices.length; i++) { - if (i == numAvailableThreads) { - break; - } - LeafSlice leafSlice = leafSlices[i]; - for (LeafReaderContext context : leafSlice.leaves) { - // collect the segments that we expect to score in each slice, and stop at those that throw - if (throwingLeaves.contains(context)) { - break; - } - expectedScoredLeaves.add(context); - } - } - // The slice that threw exception is not counted. The others that could be executed directly are, but they may have been - // cancelled before they could even start, hence we are going to score at most the segments that the slices that can be - // executed straight-away (before reaching the max pool size) are made of. We can't guarantee that we score all of them. - // We do want to guarantee that the remaining slices won't even start and none of their leaves are scored. - assertTrue(expectedScoredLeaves.containsAll(scoredLeaves)); - } finally { - executorTestWrapper.stopBusyThreads(); - terminate(executor); - } - } - } - - /** - * Simulate one or more timeout being thrown while collecting, through a custom query that times out in its Weight#scorer. - * Verify that the slices that had to wait because there were no available threads in the pool are not started following the timeout, - * which triggers a cancellation of all the tasks that are part of the running search. - * Simulate having N threads busy doing other work (e.g. other searches) otherwise all slices can be executed directly, given that - * the number of slices is dependent on the max pool size. 
- */ - public void testCancelSliceTasksOnTimeout() throws Exception { - try (Directory dir = newDirectory()) { - indexDocs(dir); - int numThreads = randomIntBetween(4, 6); - int numBusyThreads = randomIntBetween(0, 3); - int numAvailableThreads = numThreads - numBusyThreads; - ThreadPoolExecutor executor = EsExecutors.newFixed( - ContextIndexSearcherTests.class.getName(), - numThreads, - -1, - EsExecutors.daemonThreadFactory(""), - new ThreadContext(Settings.EMPTY), - EsExecutors.TaskTrackingConfig.DO_NOT_TRACK - ); - ExecutorTestWrapper executorTestWrapper = new ExecutorTestWrapper(executor, numBusyThreads); - try (DirectoryReader directoryReader = DirectoryReader.open(dir)) { - Set throwingLeaves = new HashSet<>(); - Set scoredLeaves = new CopyOnWriteArraySet<>(); - final int[] newCollectorsCalls; - final boolean[] reduceCalled; - LeafSlice[] leafSlices; - try ( - ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher( - directoryReader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - IndexSearcher.getDefaultQueryCachingPolicy(), - true, - executorTestWrapper, - executor.getMaximumPoolSize(), - 1 - ) - ) { - leafSlices = contextIndexSearcher.getSlices(); - int numThrowingLeafSlices = randomIntBetween(1, 3); - for (int i = 0; i < numThrowingLeafSlices; i++) { - LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; - throwingLeaves.add(randomFrom(throwingLeafSlice.leaves)); - } - Query query = new TestQuery() { - @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { - return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) { - if (throwingLeaves.contains(context)) { - // a random segment of some random slices throws exception. Other slices may or may not have - // started. 
- contextIndexSearcher.throwTimeExceededException(); - } - scoredLeaves.add(context); - return new ConstantScoreScorer( - this, - boost, - ScoreMode.COMPLETE, - DocIdSetIterator.all(context.reader().maxDoc()) - ); - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - }; - newCollectorsCalls = new int[] { 0 }; - reduceCalled = new boolean[] { false }; - CollectorManager collectorManager = new CollectorManager<>() { - @Override - public Collector newCollector() { - newCollectorsCalls[0]++; - return new SimpleCollector() { - @Override - public void collect(int doc) { - - } - - @Override - public ScoreMode scoreMode() { - return ScoreMode.COMPLETE; - } - }; - } - - @Override - public Integer reduce(Collection collectors) { - reduceCalled[0] = true; - return null; - } - }; - contextIndexSearcher.search(query, collectorManager); - assertBusy(() -> { - // active count is approximate, wait until it converges to the expected number - if (executor.getActiveCount() > numBusyThreads) { - throw new AssertionError("no search tasks should be left running"); - } - }); - assertTrue(contextIndexSearcher.timeExceeded()); - } - // as many tasks as slices have been created - assertEquals(leafSlices.length, newCollectorsCalls[0]); - assertTrue(reduceCalled[0]); - Set expectedScoredLeaves = new HashSet<>(); - // the first N slices, where N is the number of available permits, will run straight-away, the others will be cancelled - for (int i = 0; i < leafSlices.length; i++) { - if (i == numAvailableThreads) { - break; - } - LeafSlice leafSlice = leafSlices[i]; - for (LeafReaderContext context : leafSlice.leaves) { - // collect the segments that we expect to score in each slice, and stop at those that throw - if (throwingLeaves.contains(context)) { - break; - } - expectedScoredLeaves.add(context); - } - } - // The slice that timed out is not counted. 
The others that could be executed directly are, but they may have been - // cancelled before they could even start, hence we are going to score at most the segments that the slices that can be - // executed straight-away (before reaching the max pool size) are made of. We can't guarantee that we score all of them. - // We do want to guarantee that the remaining slices won't even start and none of their leaves are scored. - assertTrue(expectedScoredLeaves.containsAll(scoredLeaves)); - } finally { - executorTestWrapper.stopBusyThreads(); - terminate(executor); - } - } - } - - private static class ExecutorTestWrapper implements Executor { - private final ThreadPoolExecutor executor; - private final AtomicInteger startedTasks = new AtomicInteger(0); - private final CountDownLatch busyThreadsLatch = new CountDownLatch(1); - - ExecutorTestWrapper(ThreadPoolExecutor executor, int numBusyThreads) { - this.executor = executor; - // keep some of the threads occupied to simulate the situation where the slices tasks get queued up. - // This is a realistic scenario that does not get tested otherwise by executing a single concurrent search, given that the - // number of slices is capped by max pool size. - for (int i = 0; i < numBusyThreads; i++) { - execute(() -> { - try { - busyThreadsLatch.await(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } - }); - } - } - - void stopBusyThreads() { - busyThreadsLatch.countDown(); - } - - @Override - public void execute(Runnable command) { - int started = startedTasks.incrementAndGet(); - if (started > executor.getMaximumPoolSize()) { - try { - /* - There could be tasks that complete quickly before the exception is handled, which leaves room for new tasks that are - about to get cancelled to start before their cancellation becomes effective. 
We can accept that cancellation may or may - not be effective for the slices that belong to the first batch of tasks until all threads are busy and adjust the - test expectations accordingly, but for the subsequent slices, we want to assert that they are cancelled and never - executed. The only way to guarantee that is waiting for cancellation to kick in. - */ - assertBusy(() -> { - Future future = (Future) command; - if (future.isCancelled() == false) { - throw new AssertionError("task should be cancelled"); - } - }); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - executor.execute(command); - } - } - private static class TestQuery extends Query { @Override public String toString(String field) { From 58cf676d2038e07e00d2a73388b9eaf411b15529 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 1 Nov 2023 06:09:36 +0000 Subject: [PATCH 049/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-44479b3b48b --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 1fdb01602227d..ce36e431fb62c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-5b26498ec72 +lucene = 9.9.0-snapshot-44479b3b48b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 754fcabf059a7..90ae97b0ec1de 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + 
+ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0a1d15f1bcbb29de742d5d68afe176bab0102eb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 2 Nov 2023 06:10:04 +0000 Subject: [PATCH 050/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-83727a88e62 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index ce36e431fb62c..e99c5f47c7a47 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-44479b3b48b +lucene = 9.9.0-snapshot-83727a88e62 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 90ae97b0ec1de..903f354776124 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From b13d5a4662cc8e314c9d1cb006d4abb38fd4b851 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 3 Nov 2023 06:09:24 +0000 Subject: [PATCH 051/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ab9cbe5aa00 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index e99c5f47c7a47..dfb403ac75ee3 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-83727a88e62 +lucene = 9.9.0-snapshot-ab9cbe5aa00 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 903f354776124..a40e5b128f12d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 7256febbf48ec94cb40bb107f147dcbaf4a51b4f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 4 Nov 2023 06:09:03 +0000 Subject: [PATCH 052/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-71b3e4c97fb --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index dfb403ac75ee3..46a7ef02c5d99 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ab9cbe5aa00 +lucene = 9.9.0-snapshot-71b3e4c97fb bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index a40e5b128f12d..607721884ec1b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 34afe08991663bdb0146105f37712ba855946f4a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 6 Nov 2023 16:58:00 +0000 Subject: [PATCH 053/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6684da1908a --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 46a7ef02c5d99..b3e85b93f13dd 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-71b3e4c97fb +lucene = 9.9.0-snapshot-6684da1908a bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 607721884ec1b..38113c66b90c4 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From da728c58154cc6ba204ba7557fb8bddd1c30af7a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 7 Nov 2023 12:40:31 +0000 Subject: [PATCH 054/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-f7c1de55999 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index b3e85b93f13dd..d5854f6fbe428 100644 --- a/build-tools-internal/version.properties 
+++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6684da1908a +lucene = 9.9.0-snapshot-f7c1de55999 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 38113c66b90c4..966f453198006 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 4c099703a298e9c61673200f78fa0a365407bc46 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 9 Nov 2023 07:09:20 +0000 Subject: [PATCH 055/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-d9109907bca --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d5854f6fbe428..d04783eeaa845 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-f7c1de55999 +lucene = 9.9.0-snapshot-d9109907bca bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 966f453198006..2bbeb9bf5738b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 180ef285c066f82575fdc1e2595869bcdb2c5748 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 9 Nov 2023 14:17:07 +0100 Subject: [PATCH 056/263] Fix compile errors (#101874) IndexDiskUsageAnalyzer and IndexDiskUsageAnalyzerTests, as well as CompletionFieldMapper, CompletionFieldMapperTests and CompletionStatsCacheTests need adjusting after apache/lucene#12741 , to refer to the latest postings format. KuromojiTokenizerFactory needs adjusting after apache/lucene#12390 --- .../analysis/kuromoji/KuromojiTokenizerFactory.java | 2 +- .../admin/indices/diskusage/IndexDiskUsageAnalyzer.java | 7 ++++++- .../elasticsearch/index/mapper/CompletionFieldMapper.java | 2 +- .../indices/diskusage/IndexDiskUsageAnalyzerTests.java | 8 ++++---- .../index/engine/CompletionStatsCacheTests.java | 4 ++-- .../index/mapper/CompletionFieldMapperTests.java | 4 ++-- 6 files changed, 16 insertions(+), 11 deletions(-) diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java index 038af3c2357f9..d662003530c22 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java @@ -12,7 +12,7 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.ja.util.CSVUtil; +import org.apache.lucene.analysis.util.CSVUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index f232591a05a68..6587bf27f604a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.backward_codecs.lucene50.Lucene50PostingsFormat; import org.apache.lucene.backward_codecs.lucene84.Lucene84PostingsFormat; +import org.apache.lucene.backward_codecs.lucene90.Lucene90PostingsFormat; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.FieldsProducer; import org.apache.lucene.codecs.KnnVectorsReader; @@ -18,7 +19,7 @@ import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.TermVectorsReader; -import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; @@ -301,6 +302,9 @@ private static void readProximity(Terms terms, PostingsEnum postings) throws IOE private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef term) throws IOException { if (term != null && termsEnum.seekExact(term)) { final TermState termState = termsEnum.termState(); + if (termState instanceof final Lucene99PostingsFormat.IntBlockTermState blockTermState) { + return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); + } if (termState instanceof final Lucene90PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, 
blockTermState.posStartFP, blockTermState.payStartFP); } @@ -310,6 +314,7 @@ private static BlockTermState getBlockTermState(TermsEnum termsEnum, BytesRef te if (termState instanceof final Lucene50PostingsFormat.IntBlockTermState blockTermState) { return new BlockTermState(blockTermState.docStartFP, blockTermState.posStartFP, blockTermState.payStartFP); } + assert false : "unsupported postings format: " + termState; } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 2859d8bb29917..94b937c534491 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -370,7 +370,7 @@ public CompletionFieldType fieldType() { } static PostingsFormat postingsFormat() { - return PostingsFormat.forName("Completion90"); + return PostingsFormat.forName("Completion99"); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 57dbb1e73f7c5..dbbba6d325cd4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,9 +12,9 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene90.Lucene90PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import 
org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; @@ -54,7 +54,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; @@ -330,7 +330,7 @@ public void testCompletionField() throws Exception { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { - return new Completion90PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); + return new Completion99PostingsFormat(randomFrom(CompletionPostingsFormat.FSTLoadMode.values())); } else { return super.postingsFormat(); } @@ -642,7 +642,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire .setCodec(new Lucene99Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { - return new Lucene90PostingsFormat(); + return new Lucene99PostingsFormat(); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 96c38efed5b53..7c2c40e078cb4 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import 
org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.elasticsearch.ElasticsearchException; @@ -43,7 +43,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - final PostingsFormat postingsFormat = new Completion90PostingsFormat(); + final PostingsFormat postingsFormat = new Completion99PostingsFormat(); indexWriterConfig.setCodec(new Lucene99Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 99302e377b61f..1f473d0ade35b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -15,7 +15,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -149,7 +149,7 @@ public void testPostingsFormat() throws IOException { Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); PerFieldMapperCodec perFieldCodec = (PerFieldMapperCodec) codec; - 
assertThat(perFieldCodec.getPostingsFormatForField("field"), instanceOf(Completion90PostingsFormat.class)); + assertThat(perFieldCodec.getPostingsFormatForField("field"), instanceOf(Completion99PostingsFormat.class)); } public void testDefaultConfiguration() throws IOException { From 3f9ab8a3cbcd3c38c24914c78e5cb37747742f8a Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 9 Nov 2023 14:32:21 -0500 Subject: [PATCH 057/263] Adjust SortField comparators to use new Pruning API (#101983) Introduced in https://github.com/apache/lucene/pull/12405 We should account for the changes in our overrides and API. Now, to indicate that no skipping can occur, we utilize `Pruning.NONE`. --- .../action/search/BottomSortValuesCollector.java | 3 ++- .../fieldcomparator/BytesRefFieldComparatorSource.java | 5 +++-- .../fieldcomparator/DoubleValuesComparatorSource.java | 5 +++-- .../fieldcomparator/FloatValuesComparatorSource.java | 5 +++-- .../fieldcomparator/LongValuesComparatorSource.java | 5 +++-- .../lucene/grouping/SinglePassGroupingCollector.java | 3 ++- .../org/elasticsearch/lucene/grouping/TopFieldGroups.java | 3 ++- .../lucene/queries/SearchAfterSortedDocQuery.java | 3 ++- .../aggregations/bucket/composite/CompositeAggregator.java | 5 +++-- .../elasticsearch/search/sort/GeoDistanceSortBuilder.java | 5 +++-- .../org/elasticsearch/search/sort/ShardDocSortField.java | 5 +++-- .../action/search/BottomSortValuesCollectorTests.java | 3 ++- .../search/aggregations/metrics/InternalTopHitsTests.java | 3 ++- .../java/org/elasticsearch/search/query/QueryPhaseTests.java | 3 ++- .../search/searchafter/SearchAfterBuilderTests.java | 3 ++- 15 files changed, 37 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 34566ec48ccad..4461b71be9047 100644 --- 
a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.elasticsearch.search.DocValueFormat; @@ -35,7 +36,7 @@ class BottomSortValuesCollector { this.reverseMuls = new int[sortFields.length]; this.sortFields = sortFields; for (int i = 0; i < sortFields.length; i++) { - comparators[i] = sortFields[i].getComparator(1, false); + comparators[i] = sortFields[i].getComparator(1, Pruning.NONE); reverseMuls[i] = sortFields[i].getReverse() ? -1 : 1; } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index addc6f33c9eba..2f80826c6cda0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.TermOrdValComparator; @@ -68,13 +69,13 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String 
fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed; final BytesRef missingBytes = (BytesRef) missingObject(missingValue, reversed); if (indexFieldData instanceof IndexOrdinalsFieldData) { - return new TermOrdValComparator(numHits, null, sortMissingLast, reversed, false) { + return new TermOrdValComparator(numHits, null, sortMissingLast, reversed, Pruning.NONE) { @Override protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 76463807942a2..f717ff440570d 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; @@ -72,13 +73,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, doubl protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = 
(Double) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new DoubleComparator(numHits, null, null, reversed, false) { + return new DoubleComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 4b8351f430e05..e071be6c2a9a0 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.FloatComparator; @@ -65,13 +66,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we 
replace missing values in select() - return new FloatComparator(numHits, null, null, reversed, false) { + return new FloatComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 827e1618adde2..989b09700890b 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.BitSet; @@ -94,13 +95,13 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final long lMissingValue = (Long) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new LongComparator(numHits, null, null, reversed, false) { + return new LongComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public 
LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java index eaa49fceb4e63..b11a034ce4e4c 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; @@ -169,7 +170,7 @@ private SinglePassGroupingCollector( for (int i = 0; i < sortFields.length; i++) { final SortField sortField = sortFields[i]; // use topNGroups + 1 so we have a spare slot to use for comparing (tracked by this.spareSlot): - comparators[i] = sortField.getComparator(topNGroups + 1, false); + comparators[i] = sortField.getComparator(topNGroups + 1, Pruning.NONE); reversed[i] = sortField.getReverse() ? 
-1 : 1; } if (after != null) { diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index 39c807119c481..8e5efa8a880b7 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -9,6 +9,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -121,7 +122,7 @@ private static class MergeSortQueue extends PriorityQueue { reverseMul = new int[sortFields.length]; for (int compIDX = 0; compIDX < sortFields.length; compIDX++) { final SortField sortField = sortFields[compIDX]; - comparators[compIDX] = sortField.getComparator(1, false); + comparators[compIDX] = sortField.getComparator(1, Pruning.NONE); reverseMul[compIDX] = sortField.getReverse() ? 
-1 : 1; } } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1bf6a1cd4f76c..c5802f092c033 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; @@ -52,7 +53,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { this.reverseMuls = new int[numFields]; for (int i = 0; i < numFields; i++) { SortField sortField = sort.getSort()[i]; - FieldComparator fieldComparator = sortField.getComparator(1, false); + FieldComparator fieldComparator = sortField.getComparator(1, Pruning.NONE); @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) fieldComparator; comparator.setTopValue(after.fields[i]); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index dff95332d3f16..1e8f2dbac33b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; 
import org.apache.lucene.search.Sort; @@ -359,8 +360,8 @@ public int hashCode() { } @Override - public FieldComparator getComparator(int numHits, boolean enableSkipping) { - return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), false) { + public FieldComparator getComparator(int numHits, Pruning enableSkipping) { + return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 2dceca2e9ad65..d53d3d2d637c9 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; import org.apache.lucene.util.BitSet; @@ -663,8 +664,8 @@ private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) th } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { - return new DoubleComparator(numHits, null, null, reversed, false) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { + return new DoubleComparator(numHits, null, null, reversed, Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git 
a/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java b/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java index 58fd3029c0105..9cb554f560d84 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ShardDocSortField.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DocComparator; @@ -34,8 +35,8 @@ int getShardRequestIndex() { } @Override - public FieldComparator getComparator(int numHits, boolean enableSkipping) { - final DocComparator delegate = new DocComparator(numHits, getReverse(), false); + public FieldComparator getComparator(int numHits, Pruning enableSkipping) { + final DocComparator delegate = new DocComparator(numHits, getReverse(), Pruning.NONE); return new FieldComparator() { @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java index 31f3fe7066bed..4305d0af9a7c1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/BottomSortValuesCollectorTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; @@ -234,7 +235,7 @@ private Object[] newDateNanoArray(String... 
values) { private TopFieldDocs createTopDocs(SortField sortField, int totalHits, Object[] values) { FieldDoc[] fieldDocs = new FieldDoc[values.length]; @SuppressWarnings("unchecked") - FieldComparator cmp = (FieldComparator) sortField.getComparator(1, false); + FieldComparator cmp = (FieldComparator) sortField.getComparator(1, Pruning.NONE); for (int i = 0; i < values.length; i++) { fieldDocs[i] = new FieldDoc(i, Float.NaN, new Object[] { values[i] }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 35fe9c400888c..7d3799b2db35d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; @@ -367,7 +368,7 @@ private Comparator sortFieldsComparator(SortField[] sortFields) { FieldComparator[] comparators = new FieldComparator[sortFields.length]; for (int i = 0; i < sortFields.length; i++) { // Values passed to getComparator shouldn't matter - comparators[i] = sortFields[i].getComparator(0, false); + comparators[i] = sortFields[i].getComparator(0, Pruning.NONE); } return (lhs, rhs) -> { FieldDoc l = (FieldDoc) lhs; diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 9569bd982363e..59360b2d2013a 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ScoreDoc; @@ -720,7 +721,7 @@ public void testIndexSortScrollOptimization() throws Exception { @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) searchSortAndFormat.sort.getSort()[i].getComparator( 1, - i == 0 + i == 0 ? Pruning.GREATER_THAN : Pruning.NONE ); int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); if (cmp == 0) { diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 74c4b991ff401..ff963835f55f6 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; @@ -216,7 +217,7 @@ public SortField.Type reducedType() { } @Override - public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, Pruning enableSkipping, boolean reversed) { return null; } From 2247cb80a905922a89c52568f6b110dab5a928fa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 10 Nov 2023 07:08:28 +0000 Subject: 
[PATCH 058/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-9a0245333ff --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index d04783eeaa845..728740897f72c 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-d9109907bca +lucene = 9.9.0-snapshot-9a0245333ff bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 2bbeb9bf5738b..b21cc16639aa5 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From df125f2423440cb18ee6642cf854a748b4ee14c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 12 Nov 2023 07:08:55 +0000 Subject: [PATCH 059/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-448e6112954 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 728740897f72c..f59cb6bf876fe 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-9a0245333ff +lucene = 9.9.0-snapshot-448e6112954 bundled_jdk_vendor = openjdk bundled_jdk = 
21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b21cc16639aa5..f6d84dc8f8298 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From e3fc167bb8994c26d82802ce70830d267a02499a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 13 Nov 2023 07:09:17 +0000 Subject: [PATCH 060/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-448e6112954 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f6d84dc8f8298..127597284c632 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6c4479db24e295c8d8987ee67ffa23a89218b023 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 13 Nov 2023 15:56:16 +0100 Subject: [PATCH 061/263] Resolve compile error in DenseVectorFieldMapper (#102066) * Resolve compile error in DenseVectorFieldMapper A change in Lucene99HnswVectorsFormat requires that we adapt our code, see https://github.com/apache/lucene/pull/12729 * Add new Lucene file extensions * Fixing format name check --------- Co-authored-by: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> --- .../index/mapper/vectors/DenseVectorFieldMapper.java | 7 +------ .../elasticsearch/index/store/LuceneFilesExtensions.java | 5 ++++- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 2 +- 3 files changed, 6 insertions(+), 8 
deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 5e89a25fe2eb2..fc3570c443aca 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -12,7 +12,6 @@ import org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -1087,11 +1086,7 @@ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultForm format = defaultFormat; } else { HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat( - hnswIndexOptions.m, - hnswIndexOptions.efConstruction, - new Lucene99ScalarQuantizedVectorsFormat() - ); + format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java index 7504f8983b87e..463ff90b47870 100644 --- a/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java +++ b/server/src/main/java/org/elasticsearch/index/store/LuceneFilesExtensions.java @@ -76,7 +76,10 @@ public enum LuceneFilesExtensions { // kNN vectors format VEC("vec", "Vector Data", false, true), VEX("vex", "Vector Index", false, true), - VEM("vem", "Vector Metadata", true, false); + VEM("vem", "Vector Metadata", true, false), + VEMF("vemf", "Flat Vector Metadata", true, false), + VEMQ("vemq", "Scalar Quantized Vector Metadata", true, false), + VEQ("veq", "Scalar Quantized Vector Data", false, true); /** * Allow plugin developers of custom codecs to opt out of the assertion in {@link #fromExtension} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index d61960cfc0f51..6c71a43e714fe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -977,7 +977,7 @@ public void testKnnVectorsFormat() throws IOException { + m + ", beamWidth=" + efConstruction - + ", quantizer=Lucene99ScalarQuantizedVectorsFormat(name=Lucene99ScalarQuantizedVectorsFormat, quantile=null)" + + ", flatVectorFormat=Lucene99FlatVectorsFormat()" + ")"; assertEquals(expectedString, knnVectorsFormat.toString()); } From dbd5cfc1beb6346469890ff10778a07472a51309 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 14 Nov 2023 07:08:56 +0000 Subject: [PATCH 062/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-0eda40a371b --- 
build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f59cb6bf876fe..2993f7dd9bf4b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-448e6112954 +lucene = 9.9.0-snapshot-0eda40a371b bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 127597284c632..9a0a9bbfe92f9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 440450ad7692a776c095bd284748fd9361e931f2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 15 Nov 2023 07:08:36 +0000 Subject: [PATCH 063/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-910c721e065 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 2993f7dd9bf4b..4286d1a41b850 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-0eda40a371b +lucene = 9.9.0-snapshot-910c721e065 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index 9a0a9bbfe92f9..6edb5c6f03d0e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 30ab3b5f7daccbedf6793e773e545dd6fcb4c2bf Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 15 Nov 2023 09:26:14 -0500 Subject: [PATCH 064/263] Fix failing tests due to new query to string format (#102186) There have been some changes around range query toString format. This commit adjusts tests expecting particular outputs from the previous Lucene version --- .../extras/ScaledFloatFieldTypeTests.java | 28 ++++++++++--------- .../validate/SimpleValidateQueryIT.java | 5 +++- .../index/mapper/RangeFieldTypeTests.java | 4 +-- .../geo/GeoDistanceQueryBuilderTestCase.java | 4 +-- 4 files changed, 23 insertions(+), 18 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java index 603b19623a0e7..222f0f05d548d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java @@ -35,6 +35,8 @@ import java.util.Collections; import java.util.List; +import static org.hamcrest.Matchers.containsString; + public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { public void testTermQuery() { @@ -136,35 +138,35 @@ public void testRangeQuery() throws IOException { public void testRoundsUpperBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new 
ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 9]")); scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 10]")); scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9223372036854775808 TO 7999]")); } public void testRoundsLowerBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new 
ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-9 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_CONTEXT); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertThat(scaledFloatQ.toString(), containsString("scaled_float:[-10 TO 9223372036854775807]")); } public void testValueForSearch() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index afb86bd175973..27fa53481edb7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -219,7 +219,10 @@ public void testExplainDateRangeInQueryString() { long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000; long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1; - assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]")); + assertThat( + response.getQueryExplanation().get(0).getExplanation(), + containsString("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]") + ); assertThat(response.isValid(), equalTo(true)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index 5fe3711b1d034..1602e76c1a5fd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -233,12 +233,12 @@ public void testDateRangeQueryUsingMappingFormat() { RangeFieldType fieldType = new RangeFieldType("field", formatter); final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:", query.toString()); + assertThat(query.toString(), containsString("field:")); // compare lower and upper bounds with what we would get on a `date` field DateFieldType dateFieldType = new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter); final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context); - assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString()); + assertThat(queryOnDateField.toString(), containsString("field:[1465975790000 TO 1466062190999]")); } /** diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java index c9520bcfd051e..3866a57761fef 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java @@ -325,9 +325,9 @@ private void assertGeoDistanceRangeQuery(String query, double lat, double lon, d // so we cannot access its fields directly to check and have to use toString() here instead. double qLat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); double qLon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon)); - assertEquals( + assertThat( parsedQuery.toString(), - "mapped_geo_point:" + qLat + "," + qLon + " +/- " + distanceUnit.toMeters(distance) + " meters" + containsString("mapped_geo_point:" + qLat + "," + qLon + " +/- " + distanceUnit.toMeters(distance) + " meters") ); } From fc87985f9b112ad8a45f88f25bf1cfe2a4d5e32a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 16 Nov 2023 07:09:14 +0000 Subject: [PATCH 065/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b13e4a121ab --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 4286d1a41b850..3da18cd611e78 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-910c721e065 +lucene = 9.9.0-snapshot-b13e4a121ab bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 
6edb5c6f03d0e..7f2c0d2ca3c90 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 47f19f56f61d3d69eba74447036c12554f936512 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 17 Nov 2023 07:09:10 +0000 Subject: [PATCH 066/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-b13e4a121ab --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index fc8e3e4a115f9..5fb16c67e0caa 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 6e361631001b10fd5c2fafea19927b4edd2c4a8c Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 18 Nov 2023 07:08:58 +0000 Subject: [PATCH 067/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-2e8dfac07e2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 3da18cd611e78..a988ea573b4af 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-b13e4a121ab +lucene = 9.9.0-snapshot-2e8dfac07e2 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml 
b/gradle/verification-metadata.xml index 5fb16c67e0caa..1f59b87ff24f6 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 61891b42cb2cecc3a088b33cac9da4c7502db6d7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 19 Nov 2023 07:08:36 +0000 Subject: [PATCH 068/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-85e4deab437 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index a988ea573b4af..9359d4e68708b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-2e8dfac07e2 +lucene = 9.9.0-snapshot-85e4deab437 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 1f59b87ff24f6..f8a4cc3986946 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 0b27968705fc47a12261385dfa13946d01edfe6a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 20 Nov 2023 07:09:25 +0000 Subject: [PATCH 069/263] [Automated] Update Lucene snapshot 
to 9.9.0-snapshot-85e4deab437 --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index f8a4cc3986946..68581a6f04a0b 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2641,122 +2641,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 94ba92259222b096fe61fbd4dabcc0e033ea34eb Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 21 Nov 2023 07:08:53 +0000 Subject: [PATCH 070/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-6cd78318eab --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9359d4e68708b..7450cad8fb9c2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-85e4deab437 +lucene = 9.9.0-snapshot-6cd78318eab bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 68581a6f04a0b..168e29ff3ef35 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From d4f01fc7b32e87f87426357be8a3f674fa48b6c2 Mon Sep 17 00:00:00 2001 From: Saikat Sarkar <132922331+saikatsarkar056@users.noreply.github.com> Date: Tue, 21 Nov 2023 12:16:21 -0700 
Subject: [PATCH 071/263] Gather vector_operation count for knn search (#102032) --- docs/changelog/102032.yaml | 5 ++ docs/reference/search/profile.asciidoc | 3 +- .../rest-api-spec/test/search/370_profile.yml | 66 +++++++++++++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 14 ++-- .../elasticsearch/search/dfs/DfsPhase.java | 6 ++ .../search/profile/Profilers.java | 3 +- .../profile/SearchProfileDfsPhaseResult.java | 3 +- .../search/profile/dfs/DfsProfiler.java | 7 +- .../query/QueryProfileShardResult.java | 28 +++++++- .../search/profile/query/QueryProfiler.java | 10 +++ ...iversifyingChildrenByteKnnVectorQuery.java | 41 ++++++++++++ ...versifyingChildrenFloatKnnVectorQuery.java | 41 ++++++++++++ .../vectors/ProfilingKnnByteVectorQuery.java | 34 ++++++++++ .../vectors/ProfilingKnnFloatVectorQuery.java | 34 ++++++++++ .../search/vectors/ProfilingQuery.java | 27 ++++++++ .../query/QueryProfileShardResultTests.java | 4 +- ...AbstractKnnVectorQueryBuilderTestCase.java | 14 ++-- 18 files changed, 320 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/102032.yaml create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingDiversifyingChildrenByteKnnVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingDiversifyingChildrenFloatKnnVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingKnnByteVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingKnnFloatVectorQuery.java create mode 100644 server/src/main/java/org/elasticsearch/search/vectors/ProfilingQuery.java diff --git a/docs/changelog/102032.yaml b/docs/changelog/102032.yaml new file mode 100644 index 0000000000000..40463b9f252b9 --- /dev/null +++ b/docs/changelog/102032.yaml @@ -0,0 +1,5 @@ +pr: 102032 +summary: Add vector_operation_count in profile output for knn searches +area: Vector Search 
+type: enhancement +issues: [] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 52dfb91475c53..5b63929934770 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1272,6 +1272,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "dfs" : { "knn" : [ { + "vector_operations_count" : 4, "query" : [ { "type" : "DocAndScoreQuery", @@ -1321,7 +1322,7 @@ In the `dfs.knn` portion of the response we can see the output the of timings for <>, <>, and <>. Unlike many other queries, kNN search does the bulk of the work during the query rewrite. This means -`rewrite_time` represents the time spent on kNN search. +`rewrite_time` represents the time spent on kNN search. The attribute `vector_operations_count` represents the overall count of vector operations performed during the kNN search. [[profiling-considerations]] ===== Profiling Considerations diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 38212ba59a51e..0ead7b87f8acf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -229,6 +229,72 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling with vector_operations_count: + - skip: + version: ' - 8.11.99' + reason: vector_operations_count in dfs profiling added in 8.12.0 + + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, 
-20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } + - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } + - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.match_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.match: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.shallow_advance_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.shallow_advance: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.next_doc_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.next_doc: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.score_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.score: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.compute_max_score_count: 0 } + - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.compute_max_score: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.build_scorer_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.build_scorer: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight: 0 } + - gt: { profile.shards.0.dfs.knn.0.query.0.breakdown.create_weight_count: 0 } + - gt: { profile.shards.0.dfs.knn.0.rewrite_time: 0 } + - match: { profile.shards.0.dfs.knn.0.collector.0.name: "SimpleTopScoreDocCollector" } + - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } + - gt: { 
profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } + + --- dfs profile for search with dfs_query_then_fetch: - skip: diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 5ad1d43c0d4f8..0e340f2336415 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -173,6 +173,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_OPENAI_ADDED = def(8_542_00_0); public static final TransportVersion SHUTDOWN_MIGRATION_STATUS_INCLUDE_COUNTS = def(8_543_00_0); public static final TransportVersion TRANSFORM_GET_CHECKPOINT_QUERY_AND_CLUSTER_ADDED = def(8_544_00_0); + public static final TransportVersion VECTOR_OPS_COUNT_ADDED = def(8_545_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index fc3570c443aca..bd598b29e3717 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -27,11 +27,9 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.KnnByteVectorQuery; -import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.DiversifyingChildrenByteKnnVectorQuery; -import org.apache.lucene.search.join.DiversifyingChildrenFloatKnnVectorQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -53,6 +51,10 @@ import 
org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.vectors.ProfilingDiversifyingChildrenByteKnnVectorQuery; +import org.elasticsearch.search.vectors.ProfilingDiversifyingChildrenFloatKnnVectorQuery; +import org.elasticsearch.search.vectors.ProfilingKnnByteVectorQuery; +import org.elasticsearch.search.vectors.ProfilingKnnFloatVectorQuery; import org.elasticsearch.search.vectors.VectorSimilarityQuery; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -905,12 +907,12 @@ public Query createKnnQuery( bytes[i] = (byte) queryVector[i]; } yield parentFilter != null - ? new DiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) - : new KnnByteVectorQuery(name(), bytes, numCands, filter); + ? new ProfilingDiversifyingChildrenByteKnnVectorQuery(name(), bytes, filter, numCands, parentFilter) + : new ProfilingKnnByteVectorQuery(name(), bytes, numCands, filter); } case FLOAT -> parentFilter != null - ? new DiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) - : new KnnFloatVectorQuery(name(), queryVector, numCands, filter); + ? 
new ProfilingDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) + : new ProfilingKnnFloatVectorQuery(name(), queryVector, numCands, filter); }; if (similarityThreshold != null) { diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 66ccae1746197..5d3288408c99b 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; +import org.elasticsearch.search.vectors.ProfilingQuery; import org.elasticsearch.tasks.TaskCancelledException; import java.io.IOException; @@ -215,6 +216,11 @@ static DfsKnnResults singleKnnSearch(Query knnQuery, int k, Profilers profilers, CollectorResult.REASON_SEARCH_TOP_HITS ); topDocs = searcher.search(knnQuery, ipcm); + + if (knnQuery instanceof ProfilingQuery profilingQuery) { + profilingQuery.profile(knnProfiler); + } + knnProfiler.setCollectorResult(ipcm.getCollectorTree()); } // Set profiler back after running KNN searches diff --git a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java index 2cc29d654ec86..44ad9be7e1e94 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/Profilers.java +++ b/server/src/main/java/org/elasticsearch/search/profile/Profilers.java @@ -65,7 +65,8 @@ public SearchProfileQueryPhaseResult buildQueryPhaseResults() { QueryProfileShardResult result = new QueryProfileShardResult( queryProfiler.getTree(), queryProfiler.getRewriteTime(), - queryProfiler.getCollectorResult() + queryProfiler.getCollectorResult(), + null ); AggregationProfileShardResult aggResults = new 
AggregationProfileShardResult(aggProfiler.getTree()); return new SearchProfileQueryPhaseResult(Collections.singletonList(result), aggResults); diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java index 4e301d5a3300d..5f8e6a893c1b5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileDfsPhaseResult.java @@ -148,7 +148,8 @@ QueryProfileShardResult combineQueryProfileShardResults() { return new QueryProfileShardResult( profileResults, totalRewriteTime, - new CollectorResult("KnnQueryCollector", CollectorResult.REASON_SEARCH_MULTI, totalCollectionTime, subCollectorResults) + new CollectorResult("KnnQueryCollector", CollectorResult.REASON_SEARCH_MULTI, totalCollectionTime, subCollectorResults), + null ); } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java b/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java index 72104aea8a9b8..0ef4704fa1894 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java +++ b/server/src/main/java/org/elasticsearch/search/profile/dfs/DfsProfiler.java @@ -68,7 +68,12 @@ public SearchProfileDfsPhaseResult buildDfsPhaseResults() { final List queryProfileShardResult = new ArrayList<>(knnQueryProfilers.size()); for (QueryProfiler queryProfiler : knnQueryProfilers) { queryProfileShardResult.add( - new QueryProfileShardResult(queryProfiler.getTree(), queryProfiler.getRewriteTime(), queryProfiler.getCollectorResult()) + new QueryProfileShardResult( + queryProfiler.getTree(), + queryProfiler.getRewriteTime(), + queryProfiler.getCollectorResult(), + queryProfiler.getVectorOpsCount() + ) ); } return new SearchProfileDfsPhaseResult(dfsProfileResult, queryProfileShardResult); diff --git 
a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index 6c9f1edd6c583..e779152890541 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -8,10 +8,12 @@ package org.elasticsearch.search.profile.query; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -35,17 +37,27 @@ public final class QueryProfileShardResult implements Writeable, ToXContentObjec public static final String REWRITE_TIME = "rewrite_time"; public static final String QUERY_ARRAY = "query"; + public static final String VECTOR_OPERATIONS_COUNT = "vector_operations_count"; + private final List queryProfileResults; private final CollectorResult profileCollector; private final long rewriteTime; - public QueryProfileShardResult(List queryProfileResults, long rewriteTime, CollectorResult profileCollector) { + private final Long vectorOperationsCount; + + public QueryProfileShardResult( + List queryProfileResults, + long rewriteTime, + CollectorResult profileCollector, + @Nullable Long vectorOperationsCount + ) { assert (profileCollector != null); this.queryProfileResults = queryProfileResults; this.profileCollector = profileCollector; this.rewriteTime = rewriteTime; + this.vectorOperationsCount = vectorOperationsCount; } /** @@ -60,6 +72,9 @@ public QueryProfileShardResult(StreamInput in) throws IOException { profileCollector = new 
CollectorResult(in); rewriteTime = in.readLong(); + vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) + ? in.readOptionalLong() + : null; } @Override @@ -70,6 +85,9 @@ public void writeTo(StreamOutput out) throws IOException { } profileCollector.writeTo(out); out.writeLong(rewriteTime); + if (out.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) { + out.writeOptionalLong(vectorOperationsCount); + } } public List getQueryResults() { @@ -87,6 +105,9 @@ public CollectorResult getCollectorResult() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + if (vectorOperationsCount != null) { + builder.field(VECTOR_OPERATIONS_COUNT, vectorOperationsCount); + } builder.startArray(QUERY_ARRAY); for (ProfileResult p : queryProfileResults) { p.toXContent(builder, params); @@ -127,6 +148,7 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws String currentFieldName = null; List queryProfileResults = new ArrayList<>(); long rewriteTime = 0; + Long vectorOperationsCount = null; CollectorResult collector = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -134,6 +156,8 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws } else if (token.isValue()) { if (REWRITE_TIME.equals(currentFieldName)) { rewriteTime = parser.longValue(); + } else if (VECTOR_OPERATIONS_COUNT.equals(currentFieldName)) { + vectorOperationsCount = parser.longValue(); } else { parser.skipChildren(); } @@ -153,6 +177,6 @@ public static QueryProfileShardResult fromXContent(XContentParser parser) throws parser.skipChildren(); } } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector); + return new QueryProfileShardResult(queryProfileResults, rewriteTime, collector, vectorOperationsCount); } } diff 
--git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java index 8cfbecc14ecf5..a40b1284238b2 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfiler.java @@ -31,10 +31,20 @@ public final class QueryProfiler extends AbstractProfiler This interface includes the declaration of an abstract method, profile(). Classes implementing this interface + * must provide an implementation for profile() to store profiling information in the {@link QueryProfiler}. + */ + +public interface ProfilingQuery { + + /** + * Store the profiling information in the {@link QueryProfiler} + * @param queryProfiler an instance of {@link KnnFloatVectorField}. + */ + void profile(QueryProfiler queryProfiler); +} diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java index f8c8d38e92805..f28425172ead5 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfileShardResultTests.java @@ -33,7 +33,9 @@ public static QueryProfileShardResult createTestItem() { if (randomBoolean()) { rewriteTime = rewriteTime % 1000; // make sure to often test this with small values too } - return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector); + + Long vectorOperationsCount = randomBoolean() ? 
null : randomNonNegativeLong(); + return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector, vectorOperationsCount); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java index 0bb170ed04430..474f891767081 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractKnnVectorQueryBuilderTestCase.java @@ -10,8 +10,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.KnnByteVectorQuery; -import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -101,13 +99,13 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query knnQuery = ((VectorSimilarityQuery) query).getInnerKnnQuery(); assertThat(((VectorSimilarityQuery) query).getSimilarity(), equalTo(queryBuilder.getVectorSimilarity())); switch (elementType()) { - case FLOAT -> assertTrue(knnQuery instanceof KnnFloatVectorQuery); - case BYTE -> assertTrue(knnQuery instanceof KnnByteVectorQuery); + case FLOAT -> assertTrue(knnQuery instanceof ProfilingKnnFloatVectorQuery); + case BYTE -> assertTrue(knnQuery instanceof ProfilingKnnByteVectorQuery); } } else { switch (elementType()) { - case FLOAT -> assertTrue(query instanceof KnnFloatVectorQuery); - case BYTE -> assertTrue(query instanceof KnnByteVectorQuery); + case FLOAT -> assertTrue(query instanceof ProfilingKnnFloatVectorQuery); + case BYTE -> assertTrue(query instanceof ProfilingKnnByteVectorQuery); } } @@ -119,13 +117,13 @@ protected void doAssertLuceneQuery(KnnVectorQueryBuilder queryBuilder, Query que Query filterQuery = 
booleanQuery.clauses().isEmpty() ? null : booleanQuery; // The field should always be resolved to the concrete field Query knnVectorQueryBuilt = switch (elementType()) { - case BYTE -> new KnnByteVectorQuery( + case BYTE -> new ProfilingKnnByteVectorQuery( VECTOR_FIELD, getByteQueryVector(queryBuilder.queryVector()), queryBuilder.numCands(), filterQuery ); - case FLOAT -> new KnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); + case FLOAT -> new ProfilingKnnFloatVectorQuery(VECTOR_FIELD, queryBuilder.queryVector(), queryBuilder.numCands(), filterQuery); }; if (query instanceof VectorSimilarityQuery vectorSimilarityQuery) { query = vectorSimilarityQuery.getInnerKnnQuery(); From adfd7f8cea388705ab7512c9be0ba4509357512b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 22 Nov 2023 07:09:32 +0000 Subject: [PATCH 072/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-175031da6ae --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 7450cad8fb9c2..69d3ad5aa5e17 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-6cd78318eab +lucene = 9.9.0-snapshot-175031da6ae bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 168e29ff3ef35..557954d2f0f12 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - 
+ + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From c2bc2fb2a55301de2086af14a263cf2e17a8eadf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Thu, 23 Nov 2023 07:09:40 +0000 Subject: [PATCH 073/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-1138a4064e2 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 69d3ad5aa5e17..16fd63380b2a1 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-175031da6ae +lucene = 9.9.0-snapshot-1138a4064e2 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 557954d2f0f12..08bb26d001c92 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 581b6ed4085980fb585786469d14edf33f977c22 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 24 Nov 2023 07:09:51 +0000 Subject: [PATCH 074/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-ea8b6476ed3 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 16fd63380b2a1..53533a4d4c6d7 100644 --- a/build-tools-internal/version.properties +++ 
b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-1138a4064e2 +lucene = 9.9.0-snapshot-ea8b6476ed3 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 08bb26d001c92..2b4c16529a6ed 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2639,124 +2639,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 954dee6f692666b90059e84d7eed2dabcccfd8ef Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 24 Nov 2023 15:11:07 +0100 Subject: [PATCH 075/263] Introduce transport version for the next lucene upgrade (#102587) --- .../main/java/org/elasticsearch/TransportVersions.java | 8 +++----- .../search/profile/query/QueryProfileShardResult.java | 4 ++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 631b501d0ce90..37547ca3ee3ce 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -178,11 +178,9 @@ static TransportVersion def(int id) { public static final TransportVersion GRANT_API_KEY_CLIENT_AUTHENTICATION_ADDED = def(8_545_00_0); public static final TransportVersion PIT_WITH_INDEX_FILTER = def(8_546_00_0); public static final TransportVersion NODE_INFO_VERSION_AS_STRING = def(8_547_00_0); - /* - * Transport versions added for features that require the next lucene minor version. - * Their id needs to be adjusted prior to merging lucene_snapshot into main. 
- */ - public static final TransportVersion VECTOR_OPS_COUNT_ADDED = def(8_900_00_0); + + // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. + public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java index e779152890541..1b799983dd0a4 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryProfileShardResult.java @@ -72,7 +72,7 @@ public QueryProfileShardResult(StreamInput in) throws IOException { profileCollector = new CollectorResult(in); rewriteTime = in.readLong(); - vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) + vectorOperationsCount = (in.getTransportVersion().onOrAfter(TransportVersions.UPGRADE_TO_LUCENE_9_9)) ? 
in.readOptionalLong() : null; } @@ -85,7 +85,7 @@ public void writeTo(StreamOutput out) throws IOException { } profileCollector.writeTo(out); out.writeLong(rewriteTime); - if (out.getTransportVersion().onOrAfter(TransportVersions.VECTOR_OPS_COUNT_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.UPGRADE_TO_LUCENE_9_9)) { out.writeOptionalLong(vectorOperationsCount); } } From fd8cbb6fd21dab139badd2a2888dd7a7dfef13da Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sat, 25 Nov 2023 07:08:59 +0000 Subject: [PATCH 076/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-02677650e19 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index f5694e349d1db..87efaaff84c6b 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-ea8b6476ed3 +lucene = 9.9.0-snapshot-02677650e19 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 73712f1697d12..b3b29ae69ceee 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From a183379b76886cfb4076efebd381bee874043af4 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Sun, 26 Nov 2023 07:09:07 +0000 Subject: [PATCH 077/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-c367ee3ea1a --- build-tools-internal/version.properties | 
2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 87efaaff84c6b..326882aa2da26 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-02677650e19 +lucene = 9.9.0-snapshot-c367ee3ea1a bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index b3b29ae69ceee..72a0b727e709e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 74d3748a92be5c5c204a542ca45a6341305547b2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Mon, 27 Nov 2023 07:09:12 +0000 Subject: [PATCH 078/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-c367ee3ea1a --- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 72a0b727e709e..cb86d33c6ff57 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From c3b47a38b4d55ac4ea440aa61b697baf0cf6d76c Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 27 Nov 2023 14:04:34 +0100 Subject: [PATCH 079/263] spotless --- server/src/main/java/org/elasticsearch/TransportVersions.java | 1 - 1 file changed, 1 
deletion(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9cc0cdf5212fe..4e813eb2b5224 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -180,7 +180,6 @@ static TransportVersion def(int id) { public static final TransportVersion NODE_INFO_VERSION_AS_STRING = def(8_547_00_0); public static final TransportVersion GET_API_KEY_INVALIDATION_TIME_ADDED = def(8_548_00_0); - // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); From 96b20f28cd58c3d521c07755b4695af75837dec6 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 27 Nov 2023 14:06:35 -0500 Subject: [PATCH 080/263] Fix IndexDiskUsageAnalyzerTests with vectors (#102320) --- .../admin/indices/diskusage/IndexDiskUsageAnalyzer.java | 9 ++++++++- .../indices/diskusage/IndexDiskUsageAnalyzerTests.java | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 6587bf27f604a..17b28ebbe3b4b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -532,7 +532,6 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I for (FieldInfo field : reader.getFieldInfos()) { cancellationChecker.checkForCancellation(); directory.resetBytesRead(); - final KnnCollector collector = new TopKnnCollector(100, Integer.MAX_VALUE); if (field.getVectorDimension() > 0) { switch 
(field.getVectorEncoding()) { case BYTE -> { @@ -543,6 +542,10 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + final KnnCollector collector = new TopKnnCollector( + Math.max(1, Math.min(100, vectorValues.size() - 1)), + Integer.MAX_VALUE + ); int numDocsToVisit = reader.maxDoc() < 10 ? reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { @@ -562,6 +565,10 @@ void analyzeKnnVectors(SegmentReader reader, IndexDiskUsageStats stats) throws I // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + final KnnCollector collector = new TopKnnCollector( + Math.max(1, Math.min(100, vectorValues.size() - 1)), + Integer.MAX_VALUE + ); int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index dbbba6d325cd4..6c79946cce15f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -709,7 +709,7 @@ static void collectPerFieldStats(SegmentReader reader, IndexDiskUsageStats stats stats.addStoredField("_all_stored_fields", bytes); case TVX, TVD -> stats.addTermVectors("_all_vectors_fields", bytes); case NVD, NVM -> stats.addNorms("_all_norms_fields", bytes); - case VEM, VEC, VEX -> stats.addKnnVectors(fieldLookup.getVectorsField(file), bytes); + case VEM, VEMF, VEC, VEX, VEQ, VEMQ -> stats.addKnnVectors(fieldLookup.getVectorsField(file), bytes); } } } finally { From 129a30b7982aedb2fb9acd0a16c5af90c815c8c3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 28 Nov 2023 07:08:17 +0000 Subject: [PATCH 081/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-41da5c0b6a9 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 326882aa2da26..8a1a4f392f653 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-c367ee3ea1a +lucene = 9.9.0-snapshot-41da5c0b6a9 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git 
a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index cb86d33c6ff57..d71182bbdeea1 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From 8502d7a3ad5549ee9609b543b95c6595fe10f718 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 29 Nov 2023 07:09:22 +0000 Subject: [PATCH 082/263] [Automated] Update Lucene snapshot to 9.9.0-snapshot-a6d788e1138 --- build-tools-internal/version.properties | 2 +- gradle/verification-metadata.xml | 144 ++++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 8a1a4f392f653..575d8310e9e24 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-41da5c0b6a9 +lucene = 9.9.0-snapshot-a6d788e1138 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d71182bbdeea1..15920b437ee9e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,124 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + From f00364aefd09d5c59ef5199218d0d46abf05c151 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 29 Nov 2023 12:29:55 -0500 Subject: [PATCH 083/263] Add 
byte quantization for float vectors in HNSW (#102093) Adds new `quantization_options` to `dense_vector`. This allows for vectors to be automatically quantized to `byte` when indexed. Example: ``` PUT vectors { "mappings": { "properties": { "my_vector": { "type": "dense_vector", "index": true, "index_options": { "type": "int8_hnsw" } } } } } ``` When querying, the query vector is automatically quantized and used when querying the HNSW graph. This reduces the memory required to only `25%` of what was previously required for `float` vectors at a slight loss of accuracy. This is currently only available when `index: true` and when using `hnsw` --- docs/reference/how-to/knn-search.asciidoc | 15 +- .../mapping/types/dense-vector.asciidoc | 47 ++- .../search-your-data/knn-search.asciidoc | 110 +++++- .../41_knn_search_byte_quantized.yml | 366 ++++++++++++++++++ .../vectors/DenseVectorFieldMapper.java | 153 ++++++-- .../vectors/DenseVectorFieldMapperTests.java | 79 +++- 6 files changed, 733 insertions(+), 37 deletions(-) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml diff --git a/docs/reference/how-to/knn-search.asciidoc b/docs/reference/how-to/knn-search.asciidoc index 330847f5806de..066008ce26110 100644 --- a/docs/reference/how-to/knn-search.asciidoc +++ b/docs/reference/how-to/knn-search.asciidoc @@ -52,7 +52,12 @@ of datasets and configurations that we use for our nightly benchmarks. include::search-speed.asciidoc[tag=warm-fs-cache] The following file extensions are used for the approximate kNN search: -"vec" (for vector values), "vex" (for HNSW graph), "vem" (for metadata). ++ +-- +* `vec` and `veq` for vector values +* `vex` for HNSW graph +* `vem`, `vemf`, and `vemq` for metadata +-- [discrete] === Reduce vector dimensionality @@ -66,6 +71,14 @@ reduction techniques like PCA. 
When experimenting with different approaches, it's important to measure the impact on relevance to ensure the search quality is still acceptable. +[discrete] +=== Reduce vector memory foot-print + +The default <> is `float`. But this can be +automatically quantized during index time through <>. Quantization will +reduce the required memory by 4x, but it will also reduce the precision of the vectors. For `float` vectors with +`dim` greater than or equal to `384`, using a <> index is highly recommended. + [discrete] === Exclude vector fields from `_source` diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index 446e6c8ea4c43..a2ab44a173a62 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -111,6 +111,36 @@ PUT my-index-2 efficient kNN search. Like most kNN algorithms, HNSW is an approximate method that sacrifices result accuracy for improved speed. +[[dense-vector-quantization]] +==== Automatically quantize vectors for kNN search + +The `dense_vector` type supports quantization to reduce the memory footprint required when <> `float` vectors. +Currently the only quantization method supported is `int8` and provided vectors `element_type` must be `float`. To use +a quantized index, you can set your index type to `int8_hnsw`. + +When using the `int8_hnsw` index, each of the `float` vectors' dimensions are quantized to 1-byte integers. This can +reduce the memory footprint by as much as 75% at the cost of some accuracy. However, the disk usage can increase by +25% due to the overhead of storing the quantized and raw vectors. 
+ +[source,console] +-------------------------------------------------- +PUT my-byte-quantized-index +{ + "mappings": { + "properties": { + "my_vector": { + "type": "dense_vector", + "dims": 3, + "index": true, + "index_options": { + "type": "int8_hnsw" + } + } + } + } +} +-------------------------------------------------- + [role="child_attributes"] [[dense-vector-params]] ==== Parameters for dense vector fields @@ -198,8 +228,7 @@ a distinct set of options. An optional section that configures the kNN indexing algorithm. The HNSW algorithm has two internal parameters that influence how the data structure is built. These can be adjusted to improve the accuracy of results, at the -expense of slower indexing speed. When `index_options` is provided, all of its -properties must be defined. +expense of slower indexing speed. + ^*^ This parameter can only be specified when `index` is `true`. + @@ -209,17 +238,25 @@ properties must be defined. ==== `type`::: (Required, string) -The type of kNN algorithm to use. Currently only `hnsw` is supported. +The type of kNN algorithm to use. Can be either `hnsw` or `int8_hnsw`. `m`::: -(Required, integer) +(Optional, integer) The number of neighbors each node will be connected to in the HNSW graph. Defaults to `16`. `ef_construction`::: -(Required, integer) +(Optional, integer) The number of candidates to track while assembling the list of nearest neighbors for each new node. Defaults to `100`. + +`confidence_interval`::: +(Optional, float) +Only applicable to `int8_hnsw` index types. The confidence interval to use when quantizing the vectors, +can be any value between and including `0.90` and `1.0`. This value restricts the values used when calculating +the quantization thresholds. For example, a value of `0.95` will only use the middle 95% of the values when +calculating the quantization thresholds (e.g. the highest and lowest 2.5% of values will be ignored). +Defaults to `1/(dims + 1)`. 
==== [[dense-vector-synthetic-source]] diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index c39719f1a3b61..ff64535c705d9 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -242,6 +242,114 @@ POST byte-image-index/_search // TEST[s/"k": 10/"k": 3/] // TEST[s/"num_candidates": 100/"num_candidates": 3/] +[discrete] +[[knn-search-quantized-example]] +==== Byte quantized kNN search + +If you want to provide `float` vectors, but want the memory savings of `byte` vectors, you can use the +<> feature. Quantization allows you to provide `float` vectors, but +internally they are indexed as `byte` vectors. Additionally, the original `float` vectors are still retained +in the index. + +To use quantization, you can use the index type `int8_hnsw` object in the `dense_vector` mapping. + +[source,console] +---- +PUT quantized-image-index +{ + "mappings": { + "properties": { + "image-vector": { + "type": "dense_vector", + "element_type": "float", + "dims": 2, + "index": true, + "index_options": { + "type": "int8_hnsw" + } + }, + "title": { + "type": "text" + } + } + } +} +---- +// TEST[continued] + +. Index your `float` vectors. ++ +[source,console] +---- +POST quantized-image-index/_bulk?refresh=true +{ "index": { "_id": "1" } } +{ "image-vector": [0.1, -2], "title": "moose family" } +{ "index": { "_id": "2" } } +{ "image-vector": [0.75, -1], "title": "alpine lake" } +{ "index": { "_id": "3" } } +{ "image-vector": [1.2, 0.1], "title": "full moon" } +---- +//TEST[continued] + +. Run the search using the <>. When searching, the `float` vector is +automatically quantized to a `byte` vector. 
++ +[source,console] +---- +POST quantized-image-index/_search +{ + "knn": { + "field": "image-vector", + "query_vector": [0.1, -2], + "k": 10, + "num_candidates": 100 + }, + "fields": [ "title" ] +} +---- +// TEST[continued] +// TEST[s/"k": 10/"k": 3/] +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + +Since the original `float` vectors are still retained in the index, you can optionally use them for re-scoring. Meaning, +you can search over all the vectors quickly using the `int8_hnsw` index and then rescore only the top `k` results. This +provides the best of both worlds, fast search and accurate scoring. + +[source,console] +---- +POST quantized-image-index/_search +{ + "knn": { + "field": "image-vector", + "query_vector": [0.1, -2], + "k": 15, + "num_candidates": 100 + }, + "fields": [ "title" ], + "rescore": { + "window_size": 10, + "query": { + "rescore_query": { + "script_score": { + "query": { + "match_all": {} + }, + "script": { + "source": "cosineSimilarity(params.query_vector, 'image-vector') + 1.0", + "params": { + "query_vector": [0.1, -2] + } + } + } + } + } + } +} +---- +// TEST[continued] +// TEST[s/"k": 15/"k": 3/] +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + [discrete] [[knn-search-filter-example]] ==== Filtered kNN search @@ -903,7 +1011,7 @@ the global top `k` matches across shards. You cannot set the To run an exact kNN search, use a `script_score` query with a vector function. . Explicitly map one or more `dense_vector` fields. If you don't intend to use -the field for approximate kNN, set the `index` mapping option to `false`. This +the field for approximate kNN, set the `index` mapping option to `false`. This can significantly improve indexing speed. 
+ [source,console] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml new file mode 100644 index 0000000000000..f700664c43fc1 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -0,0 +1,366 @@ +setup: + - skip: + version: ' - 8.11.99' + reason: 'kNN float to byte quantization added in 8.12' + - do: + indices.create: + index: hnsw_byte_quantized + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: int8_hnsw + another_vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: int8_hnsw + + - do: + index: + index: hnsw_byte_quantized + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [130.0, 115.0, -1.02, 15.555, -100.0] + + - do: + index: + index: hnsw_byte_quantized + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + another_vector: [-0.5, 50.0, -1, 1, 120] + + - do: + index: + index: hnsw_byte_quantized + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [-0.5, 11.0, 0, 12, 111.0] + + - do: + indices.refresh: {} + +--- +"kNN search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: 
[-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} +--- +"kNN search plus query": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0.fields.name.0: "cow.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search with query": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1.fields.name.0: "cow.jpg"} + + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2.fields.name.0: "moose.jpg"} +--- +"kNN search with filter": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + term: + name: "rabbit.jpg" + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + - 
term: + name: "rabbit.jpg" + - term: + _id: 2 + + - match: {hits.total.value: 0} + +--- +"KNN Vector similarity search only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 10.3 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} +--- +"Vector similarity with filter only": + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 11 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "moose.jpg"}} + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - do: + search: + index: hnsw_byte_quantized + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 110 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "cow.jpg"}} + + - length: {hits.hits: 0} +--- +"Knn search with mip": + - do: + indices.create: + index: mip + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: max_inner_product + index_options: + type: int8_hnsw + + - do: + index: + index: mip + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + + - do: + index: + index: mip + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + + - do: + index: + index: mip + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - do: + indices.refresh: {} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + + - length: {hits.hits: 3} + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.1._id: "3"} + - match: 
{hits.hits.2._id: "2"} + + - do: + search: + index: mip + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: { "term": { "name": "moose.jpg" } } + + + + - length: {hits.hits: 1} + - match: {hits.hits.0._id: "2"} +--- +"Cosine similarity with indexed vector": + - skip: + features: "headers" + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "cosineSimilarity(params.query_vector, 'vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "3"} + - gte: {hits.hits.0._score: 0.999} + - lte: {hits.hits.0._score: 1.001} + + - match: {hits.hits.1._id: "2"} + - gte: {hits.hits.1._score: 0.998} + - lte: {hits.hits.1._score: 1.0} + + - match: {hits.hits.2._id: "1"} + - gte: {hits.hits.2._score: 0.78} + - lte: {hits.hits.2._score: 0.791} +--- +"Test bad quantization parameters": + - do: + catch: bad_request + indices.create: + index: bad_hnsw_quantized + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + element_type: byte + index: true + index_options: + type: int8_hnsw + + - do: + catch: bad_request + indices.create: + index: bad_hnsw_quantized + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: false + index_options: + type: int8_hnsw diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index bd598b29e3717..dde2bcf06b0c7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -11,6 +11,7 @@ import org.apache.lucene.codecs.KnnVectorsFormat; import 
org.apache.lucene.codecs.KnnVectorsReader; import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.lucene99.Lucene99HnswScalarQuantizedVectorsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; @@ -68,6 +69,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; @@ -173,6 +175,13 @@ public Builder(String name, IndexVersion indexVersionCreated) { } } }); + this.indexOptions.addValidator(v -> { + if (v instanceof Int8HnswIndexOptions && elementType.getValue() == ElementType.BYTE) { + throw new IllegalArgumentException( + "[element_type] cannot be [byte] when using index type [" + VectorIndexType.INT8_HNSW.name + "]" + ); + } + }); } @Override @@ -702,26 +711,124 @@ private abstract static class IndexOptions implements ToXContent { IndexOptions(String type) { this.type = type; } + + abstract KnnVectorsFormat getVectorsFormat(); } - private static class HnswIndexOptions extends IndexOptions { + private enum VectorIndexType { + HNSW("hnsw") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object mNode = indexOptionsMap.remove("m"); + Object efConstructionNode = indexOptionsMap.remove("ef_construction"); + if (mNode == null) { + mNode = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; + } + if (efConstructionNode == null) { + efConstructionNode = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; + } + int m = XContentMapValues.nodeIntegerValue(mNode); + int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new HnswIndexOptions(m, efConstruction); + } + }, + INT8_HNSW("int8_hnsw") { + @Override + public IndexOptions 
parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object mNode = indexOptionsMap.remove("m"); + Object efConstructionNode = indexOptionsMap.remove("ef_construction"); + Object confidenceIntervalNode = indexOptionsMap.remove("confidence_interval"); + if (mNode == null) { + mNode = Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; + } + if (efConstructionNode == null) { + efConstructionNode = Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; + } + int m = XContentMapValues.nodeIntegerValue(mNode); + int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); + Float confidenceInterval = null; + if (confidenceIntervalNode != null) { + confidenceInterval = (float) XContentMapValues.nodeDoubleValue(confidenceIntervalNode); + } + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new Int8HnswIndexOptions(m, efConstruction, confidenceInterval); + } + }; + + static Optional fromString(String type) { + return Stream.of(VectorIndexType.values()).filter(vectorIndexType -> vectorIndexType.name.equals(type)).findFirst(); + } + + private final String name; + + VectorIndexType(String name) { + this.name = name; + } + + abstract IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap); + } + + private static class Int8HnswIndexOptions extends IndexOptions { private final int m; private final int efConstruction; + private final Float confidenceInterval; - static IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { - Object mNode = indexOptionsMap.remove("m"); - Object efConstructionNode = indexOptionsMap.remove("ef_construction"); - if (mNode == null) { - throw new MapperParsingException("[index_options] of type [hnsw] requires field [m] to be configured"); - } - if (efConstructionNode == null) { - throw new MapperParsingException("[index_options] of type [hnsw] requires field [ef_construction] to be configured"); + private Int8HnswIndexOptions(int m, int efConstruction, Float confidenceInterval) { + 
super("int8_hnsw"); + this.m = m; + this.efConstruction = efConstruction; + this.confidenceInterval = confidenceInterval; + } + + @Override + public KnnVectorsFormat getVectorsFormat() { + return new Lucene99HnswScalarQuantizedVectorsFormat(m, efConstruction, 1, confidenceInterval, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.field("m", m); + builder.field("ef_construction", efConstruction); + if (confidenceInterval != null) { + builder.field("confidence_interval", confidenceInterval); } - int m = XContentMapValues.nodeIntegerValue(mNode); - int efConstruction = XContentMapValues.nodeIntegerValue(efConstructionNode); - MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); - return new HnswIndexOptions(m, efConstruction); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Int8HnswIndexOptions that = (Int8HnswIndexOptions) o; + return m == that.m && efConstruction == that.efConstruction && Objects.equals(confidenceInterval, that.confidenceInterval); + } + + @Override + public int hashCode() { + return Objects.hash(m, efConstruction, confidenceInterval); + } + + @Override + public String toString() { + return "{type=" + + type + + ", m=" + + m + + ", ef_construction=" + + efConstruction + + ", confidence_interval=" + + confidenceInterval + + "}"; } + } + + private static class HnswIndexOptions extends IndexOptions { + private final int m; + private final int efConstruction; private HnswIndexOptions(int m, int efConstruction) { super("hnsw"); @@ -729,6 +836,11 @@ private HnswIndexOptions(int m, int efConstruction) { this.efConstruction = efConstruction; } + @Override + public KnnVectorsFormat getVectorsFormat() { + return new Lucene99HnswVectorsFormat(m, efConstruction, 1, null); + 
} + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -754,7 +866,7 @@ public int hashCode() { @Override public String toString() { - return "{type=" + type + ", m=" + m + ", ef_construction=" + efConstruction + " }"; + return "{type=" + type + ", m=" + m + ", ef_construction=" + efConstruction + "}"; } } @@ -1071,11 +1183,9 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) throw new MapperParsingException("[index_options] requires field [type] to be configured"); } String type = XContentMapValues.nodeStringValue(typeNode); - if (type.equals("hnsw")) { - return HnswIndexOptions.parseIndexOptions(fieldName, indexOptionsMap); - } else { - throw new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]"); - } + return VectorIndexType.fromString(type) + .orElseThrow(() -> new MapperParsingException("Unknown vector index options type [" + type + "] for field [" + fieldName + "]")) + .parseIndexOptions(fieldName, indexOptionsMap); } /** @@ -1083,12 +1193,11 @@ private static IndexOptions parseIndexOptions(String fieldName, Object propNode) * {@code null} if the default format should be used. */ public KnnVectorsFormat getKnnVectorsFormatForField(KnnVectorsFormat defaultFormat) { - KnnVectorsFormat format; + final KnnVectorsFormat format; if (indexOptions == null) { format = defaultFormat; } else { - HnswIndexOptions hnswIndexOptions = (HnswIndexOptions) indexOptions; - format = new Lucene99HnswVectorsFormat(hnswIndexOptions.m, hnswIndexOptions.efConstruction); + format = indexOptions.getVectorsFormat(); } // It's legal to reuse the same format name as this is the same on-disk format. 
return new KnnVectorsFormat(format.getName()) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 6c71a43e714fe..1e45ddaf9e8a7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -495,6 +495,11 @@ public void testInvalidParameters() { ); assertThat(e.getMessage(), containsString("[index_options] requires field [type] to be configured")); + e = expectThrows( + MapperParsingException.class, + () -> createDocumentMapper(fieldMapping(b -> b.field("type", "dense_vector").field("dims", 3).field("element_type", "foo"))) + ); + assertThat(e.getMessage(), containsString("invalid element_type [foo]; available types are ")); e = expectThrows( MapperParsingException.class, () -> createDocumentMapper( @@ -505,18 +510,35 @@ public void testInvalidParameters() { .field("index", true) .startObject("index_options") .field("type", "hnsw") - .field("ef_construction", 100) + .startObject("foo") + .endObject() .endObject() ) ) ); - assertThat(e.getMessage(), containsString("[index_options] of type [hnsw] requires field [m] to be configured")); - + assertThat( + e.getMessage(), + containsString("Failed to parse mapping: Mapping definition for [field] has unsupported parameters: [foo : {}]") + ); e = expectThrows( MapperParsingException.class, - () -> createDocumentMapper(fieldMapping(b -> b.field("type", "dense_vector").field("dims", 3).field("element_type", "bytes"))) + () -> createDocumentMapper( + fieldMapping( + b -> b.field("type", "dense_vector") + .field("dims", 3) + .field("element_type", "byte") + .field("similarity", "l2_norm") + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .endObject() + ) + ) + ); + assertThat( + 
e.getMessage(), + containsString("Failed to parse mapping: [element_type] cannot be [byte] when using index type [int8_hnsw]") ); - assertThat(e.getMessage(), containsString("invalid element_type [bytes]; available types are ")); } public void testInvalidParametersBeforeIndexedByDefault() { @@ -958,6 +980,8 @@ public void testFloatVectorQueryBoundaries() throws IOException { public void testKnnVectorsFormat() throws IOException { final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); + boolean setM = randomBoolean(); + boolean setEfConstruction = randomBoolean(); MapperService mapperService = createMapperService(fieldMapping(b -> { b.field("type", "dense_vector"); b.field("dims", 4); @@ -965,20 +989,59 @@ public void testKnnVectorsFormat() throws IOException { b.field("similarity", "dot_product"); b.startObject("index_options"); b.field("type", "hnsw"); + if (setM) { + b.field("m", m); + } + if (setEfConstruction) { + b.field("ef_construction", efConstruction); + } + b.endObject(); + })); + CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); + Codec codec = codecService.codec("default"); + assertThat(codec, instanceOf(PerFieldMapperCodec.class)); + KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); + String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + + (setM ? m : DEFAULT_MAX_CONN) + + ", beamWidth=" + + (setEfConstruction ? 
efConstruction : DEFAULT_BEAM_WIDTH) + + ", flatVectorFormat=Lucene99FlatVectorsFormat()" + + ")"; + assertEquals(expectedString, knnVectorsFormat.toString()); + } + + public void testKnnQuantizedHNSWVectorsFormat() throws IOException { + final int m = randomIntBetween(1, DEFAULT_MAX_CONN + 10); + final int efConstruction = randomIntBetween(1, DEFAULT_BEAM_WIDTH + 10); + boolean setConfidenceInterval = randomBoolean(); + float confidenceInterval = (float) randomDoubleBetween(0.90f, 1.0f, true); + MapperService mapperService = createMapperService(fieldMapping(b -> { + b.field("type", "dense_vector"); + b.field("dims", 4); + b.field("index", true); + b.field("similarity", "dot_product"); + b.startObject("index_options"); + b.field("type", "int8_hnsw"); b.field("m", m); b.field("ef_construction", efConstruction); + if (setConfidenceInterval) { + b.field("confidence_interval", confidenceInterval); + } b.endObject(); })); CodecService codecService = new CodecService(mapperService, BigArrays.NON_RECYCLING_INSTANCE); Codec codec = codecService.codec("default"); assertThat(codec, instanceOf(PerFieldMapperCodec.class)); KnnVectorsFormat knnVectorsFormat = ((PerFieldMapperCodec) codec).getKnnVectorsFormatForField("field"); - String expectedString = "Lucene99HnswVectorsFormat(name=Lucene99HnswVectorsFormat, maxConn=" + String expectedString = "Lucene99HnswScalarQuantizedVectorsFormat(name=Lucene99HnswScalarQuantizedVectorsFormat, maxConn=" + m + ", beamWidth=" + efConstruction - + ", flatVectorFormat=Lucene99FlatVectorsFormat()" - + ")"; + + ", flatVectorFormat=Lucene99ScalarQuantizedVectorsFormat(" + + "name=Lucene99ScalarQuantizedVectorsFormat, confidenceInterval=" + + (setConfidenceInterval ? 
confidenceInterval : null) + + ", rawVectorFormat=Lucene99FlatVectorsFormat()" + + "))"; assertEquals(expectedString, knnVectorsFormat.toString()); } From 0f394d06068765951b6babc583480c9b6964c4cd Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 29 Nov 2023 21:35:55 +0000 Subject: [PATCH 084/263] Use lucene 9.0.0 RC1 --- build-tools-internal/version.properties | 2 +- build.gradle | 5 + gradle/verification-metadata.xml | 142 +++++++++++------------- 3 files changed, 72 insertions(+), 77 deletions(-) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 575d8310e9e24..9763cef8aefeb 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.12.0 -lucene = 9.9.0-snapshot-a6d788e1138 +lucene = 9.9.0 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d diff --git a/build.gradle b/build.gradle index c0b613beefea4..4783868f4e0b2 100644 --- a/build.gradle +++ b/build.gradle @@ -195,6 +195,11 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' + + repositories { + // TODO: Temporary for Lucene RC builds. 
REMOVE + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC1-rev-92a5e5b02e0e083126c4122f2b7a02426c21a037/lucene/maven" } + } } allprojects { diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index d90d60bf701e1..5f2795e343162 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2659,124 +2659,114 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - - - - - - - - - - - + + + From 9559997c8fa299a9079500089e753f8b21223f55 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 29 Nov 2023 22:43:10 +0100 Subject: [PATCH 085/263] Add leak-tracked ref counting to MultiSearchResponse (#102479) Make the ref-counting leak tracked and release child items for MultiSearchResponse and adjust tests + REST action accordingly. 
--- .../mustache/MultiSearchTemplateResponse.java | 24 +++++---- .../action/search/MultiSearchResponse.java | 45 +++++++++++++++++ .../search/TransportMultiSearchAction.java | 10 +++- .../search/MultiSearchActionTookTests.java | 49 +++++++++---------- .../TransportMultiSearchActionTests.java | 15 +++--- .../action/support/ActionTestUtils.java | 8 +++ .../action/EnrichCoordinatorProxyAction.java | 29 +++++++++-- .../rest/RestFleetMultiSearchAction.java | 8 ++- .../integration/KibanaUserRoleIntegTests.java | 31 +++++++----- .../security/profile/ProfileServiceTests.java | 15 +++--- 10 files changed, 166 insertions(+), 68 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index ae4d3469f96c4..a26352eb3d8c7 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -170,18 +170,22 @@ static final class Fields { public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { // The MultiSearchTemplateResponse is identical to the multi search response so we reuse the parsing logic in multi search response MultiSearchResponse mSearchResponse = MultiSearchResponse.fromXContext(parser); - org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = mSearchResponse.getResponses(); - Item[] templateResponses = new Item[responses.length]; - int i = 0; - for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) { - SearchTemplateResponse stResponse = null; - if (item.getResponse() != null) { - stResponse = new SearchTemplateResponse(); - stResponse.setResponse(item.getResponse()); + try { + org.elasticsearch.action.search.MultiSearchResponse.Item[] responses = 
mSearchResponse.getResponses(); + Item[] templateResponses = new Item[responses.length]; + int i = 0; + for (org.elasticsearch.action.search.MultiSearchResponse.Item item : responses) { + SearchTemplateResponse stResponse = null; + if (item.getResponse() != null) { + stResponse = new SearchTemplateResponse(); + stResponse.setResponse(item.getResponse()); + } + templateResponses[i++] = new Item(stResponse, item.getFailure()); } - templateResponses[i++] = new Item(stResponse, item.getFailure()); + return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis()); + } finally { + mSearchResponse.decRef(); } - return new MultiSearchTemplateResponse(templateResponses, mSearchResponse.getTook().millis()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java index 02b2b9b99e68f..aee631fb5d4cf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java @@ -19,8 +19,11 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -134,19 +137,58 @@ public Exception getFailure() { private final Item[] items; private final long tookInMillis; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + for (int i = 0; i < items.length; 
i++) { + Item item = items[i]; + var r = item.response; + if (r != null) { + r.decRef(); + items[i] = null; + } + } + } + }); + public MultiSearchResponse(StreamInput in) throws IOException { super(in); items = in.readArray(Item::new, Item[]::new); tookInMillis = in.readVLong(); } + /** + * @param items individual search responses, the elements in this array are considered as owned by this instance for ref-counting + * purposes if their {@link Item#response} is non-null + */ public MultiSearchResponse(Item[] items, long tookInMillis) { this.items = items; this.tookInMillis = tookInMillis; } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + @Override public Iterator iterator() { + assert hasReferences(); return Iterators.forArray(items); } @@ -154,6 +196,7 @@ public Iterator iterator() { * The list of responses, the order is the same as the one provided in the request. 
*/ public Item[] getResponses() { + assert hasReferences(); return this.items; } @@ -166,12 +209,14 @@ public TimeValue getTook() { @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); out.writeArray(items); out.writeVLong(tookInMillis); } @Override public Iterator toXContentChunked(ToXContent.Params params) { + assert hasReferences(); return Iterators.concat( ChunkedToXContentHelper.startObject(), Iterators.single((b, p) -> b.field("took", tookInMillis).startArray(Fields.RESPONSES)), diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index be892f0a0b982..a7d971069f96d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -186,9 +186,15 @@ private void handleResponse(final int responseSlot, final MultiSearchResponse.It } private void finish() { - listener.onResponse( - new MultiSearchResponse(responses.toArray(new MultiSearchResponse.Item[responses.length()]), buildTookInMillis()) + final var response = new MultiSearchResponse( + responses.toArray(new MultiSearchResponse.Item[responses.length()]), + buildTookInMillis() ); + try { + listener.onResponse(response); + } finally { + response.decRef(); + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 54f03fdb4ff2d..f1867b223760d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -96,20 +96,16 @@ private void runTestTook(boolean controlledClock) throws Exception { action.doExecute(mock(Task.class), multiSearchRequest, new 
ActionListener<>() { @Override public void onResponse(MultiSearchResponse multiSearchResponse) { - try { - if (controlledClock) { - assertThat( - TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS), - equalTo(multiSearchResponse.getTook().getMillis()) - ); - } else { - assertThat( - multiSearchResponse.getTook().getMillis(), - greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS)) - ); - } - } finally { - multiSearchResponse.decRef(); + if (controlledClock) { + assertThat( + TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS), + equalTo(multiSearchResponse.getTook().getMillis()) + ); + } else { + assertThat( + multiSearchResponse.getTook().getMillis(), + greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS)) + ); } } @@ -151,18 +147,21 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + var resp = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY ); + try { + listener.onResponse(resp); + } finally { + resp.decRef(); + } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 8233d772e0d2b..70bd2d9f00a05 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -35,6 +35,7 @@ import java.util.IdentityHashMap; import java.util.List; import java.util.Set; +import java.util.concurrent.ExecutionException; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -102,14 +103,14 @@ public String getLocalNodeId() { PlainActionFuture future = new PlainActionFuture<>(); action.execute(task, multiSearchRequest, future); - future.get().decRef(); + future.get(); assertEquals(numSearchRequests, counter.get()); } finally { assertTrue(ESTestCase.terminate(threadPool)); } } - public void testBatchExecute() { + public void testBatchExecute() throws ExecutionException, InterruptedException { // Initialize dependencies of TransportMultiSearchAction Settings settings = Settings.builder().put("node.name", TransportMultiSearchActionTests.class.getSimpleName()).build(); ActionFilters actionFilters = mock(ActionFilters.class); @@ -204,14 +205,14 @@ public String getLocalNodeId() { multiSearchRequest.add(new SearchRequest()); } - MultiSearchResponse response = ActionTestUtils.executeBlocking(action, multiSearchRequest); - try { + final PlainActionFuture future = new PlainActionFuture<>(); + ActionTestUtils.execute(action, multiSearchRequest, future.delegateFailure((l, response) -> { assertThat(response.getResponses().length, equalTo(numSearchRequests)); assertThat(requests.size(), equalTo(numSearchRequests)); assertThat(errorHolder.get(), nullValue()); - } finally { - response.decRef(); - } + l.onResponse(null); + })); + future.get(); } finally { assertTrue(ESTestCase.terminate(threadPool)); } diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java index 393c326e6fcf5..187a8b6e4eab2 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java @@ -65,6 +65,14 @@ public static v action.execute(task, request, listener); } + public static void execute( + 
TransportAction action, + Request request, + ActionListener listener + ) { + action.execute(request.createTask(1L, "direct", action.actionName, TaskId.EMPTY_TASK_ID, Map.of()), request, listener); + } + public static ActionListener assertNoFailureListener(CheckedConsumer consumer) { return ActionListener.wrap(consumer, ESTestCase::fail); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java index 2a880c9a22cdd..5e0e7a6314d67 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java @@ -30,6 +30,7 @@ import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -238,13 +239,24 @@ static BiConsumer final List> enrichIndexRequestsAndSlots = entry.getValue(); ActionListener listener = ActionListener.wrap(response -> { shardResponses.put(enrichIndexName, new Tuple<>(response, null)); + response.incRef(); // will be released during reduce if (counter.incrementAndGet() == itemsPerIndex.size()) { - consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null); + var res = reduce(request.requests().size(), itemsPerIndex, shardResponses); + try { + consumer.accept(res, null); + } finally { + res.decRef(); + } } }, e -> { shardResponses.put(enrichIndexName, new Tuple<>(null, e)); if (counter.incrementAndGet() == itemsPerIndex.size()) { - consumer.accept(reduce(request.requests().size(), itemsPerIndex, shardResponses), null); + var res = reduce(request.requests().size(), itemsPerIndex, shardResponses); + try { + consumer.accept(res, null); + } finally { + res.decRef(); + } } }); @@ -261,14 +273,23 @@ static 
MultiSearchResponse reduce( Map> shardResponses ) { MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numRequest]; - for (Map.Entry> rspEntry : shardResponses.entrySet()) { + for (Iterator>> iterator = shardResponses.entrySet() + .iterator(); iterator.hasNext();) { + Map.Entry> rspEntry = iterator.next(); List> reqSlots = itemsPerIndex.get(rspEntry.getKey()); if (rspEntry.getValue().v1() != null) { MultiSearchResponse shardResponse = rspEntry.getValue().v1(); for (int i = 0; i < shardResponse.getResponses().length; i++) { int slot = reqSlots.get(i).v1(); - items[slot] = shardResponse.getResponses()[i]; + var res = shardResponse.getResponses()[i]; + items[slot] = res; + var r = res.getResponse(); + if (r != null) { + r.incRef(); + } } + iterator.remove(); + shardResponse.decRef(); } else if (rspEntry.getValue().v2() != null) { Exception e = rspEntry.getValue().v2(); for (Tuple originSlot : reqSlots) { diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java index 29636436af578..c177bea2e63ca 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestFleetMultiSearchAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; -import org.elasticsearch.rest.action.RestChunkedToXContentListener; +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.usage.SearchUsageHolder; @@ -112,7 +112,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli 
return channel -> { final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); - cancellableClient.execute(TransportMultiSearchAction.TYPE, multiSearchRequest, new RestChunkedToXContentListener<>(channel)); + cancellableClient.execute( + TransportMultiSearchAction.TYPE, + multiSearchRequest, + new RestRefCountedChunkedToXContentListener<>(channel) + ); }; } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index 4f15ff4c4fa46..afe9e68716579 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; +import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.settings.SecureString; @@ -23,7 +24,6 @@ import java.util.Map; import static java.util.Collections.singletonMap; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -110,17 +110,24 @@ public void testSearchAndMSearch() throws Exception { ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).get(); assertEquals(response.getHits().getTotalHits().value, hits); - assertResponse( 
- client().prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())), - multiSearchResponse -> { - final long multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; - assertThat(hits, greaterThan(0L)); - multiSearchResponse = client().filterWithHeader( - singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); - } - ); + final long multiHits; + MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() + .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) + .get(); + try { + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + assertThat(hits, greaterThan(0L)); + } finally { + multiSearchResponse.decRef(); + } + multiSearchResponse = client().filterWithHeader( + singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) + ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); + try { + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + } finally { + multiSearchResponse.decRef(); + } } public void testGetIndex() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index 4471536308216..17f1268b7f5e8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -585,13 +585,16 @@ public 
void testSecurityProfileOrigin() { ); @SuppressWarnings("unchecked") final ActionListener listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse( - new MultiSearchResponse( - new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY), null) }, - 1L - ) + var resp = new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY), null) }, + 1L ); + try { + listener.onResponse(resp); + } finally { + resp.decRef(); + } return null; }).when(client).execute(eq(TransportMultiSearchAction.TYPE), any(MultiSearchRequest.class), anyActionListener()); From 7b2479ce61f2a9f5b8c9e520237fa763f085a07e Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Wed, 29 Nov 2023 21:53:29 +0000 Subject: [PATCH 086/263] Fix gradle verification metadata --- gradle/verification-metadata.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5f2795e343162..9d6f8f21bc74a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2664,6 +2664,16 @@ + + + + + + + + + + From dc84d359dfaa485eb64f9e886657c009183d1467 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 29 Nov 2023 21:55:59 +0000 Subject: [PATCH 087/263] Update docs/changelog/102782.yaml --- docs/changelog/102782.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 docs/changelog/102782.yaml diff --git a/docs/changelog/102782.yaml b/docs/changelog/102782.yaml new file mode 100644 index 0000000000000..ed0a004765859 --- /dev/null +++ b/docs/changelog/102782.yaml @@ -0,0 +1,5 @@ +pr: 102782 +summary: Upgrade to Lucene 9.9.0 +area: Search +type: upgrade +issues: [] From 7cf32030e5e3324a2dde5ee46b30617cbcb20400 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 29 Nov 2023 18:18:09 
-0800 Subject: [PATCH 088/263] Allow match field in enrich fields (#102734) It's perfectly fine for the match field of an enrich policy to be included in the enrich fields. However ESQL enrich consistently fails on such an enrich policy because it mistakenly excludes the match field from the enrich mapping during resolution. --- docs/changelog/102734.yaml | 5 ++ .../xpack/esql/analysis/Analyzer.java | 6 +- .../xpack/esql/analysis/AnalyzerTests.java | 25 +++++++ .../test/esql/62_extra_enrich.yml | 70 +++++++++++++++++++ 4 files changed, 104 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/102734.yaml create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml diff --git a/docs/changelog/102734.yaml b/docs/changelog/102734.yaml new file mode 100644 index 0000000000000..c27846d7d8478 --- /dev/null +++ b/docs/changelog/102734.yaml @@ -0,0 +1,5 @@ +pr: 102734 +summary: Allow match field in enrich fields +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 24a9076f69768..a8462703a2b37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -259,8 +259,10 @@ public static List calculateEnrichFields( List enrichFields, EnrichPolicy policy ) { - Map fieldMap = mapping.stream().collect(Collectors.toMap(NamedExpression::name, Function.identity())); - fieldMap.remove(policy.getMatchField()); + Set policyEnrichFieldSet = new HashSet<>(policy.getEnrichFields()); + Map fieldMap = mapping.stream() + .filter(e -> policyEnrichFieldSet.contains(e.name())) + .collect(Collectors.toMap(NamedExpression::name, Function.identity())); List result = new ArrayList<>(); if (enrichFields == null || enrichFields.isEmpty()) { // use 
the policy to infer the enrich fields diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 87c5310889023..8990433a5155d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -16,6 +16,9 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -50,6 +53,7 @@ import java.io.InputStream; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.IntStream; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -1383,6 +1387,27 @@ public void testEnrichExcludesPolicyKey() { assertThat(e.getMessage(), containsString("Unknown column [id]")); } + public void testEnrichFieldsIncludeMatchField() { + String query = """ + FROM test + | EVAL x = to_string(languages) + | ENRICH languages ON x + | KEEP language_name, language_code + """; + IndexResolution testIndex = loadMapping("mapping-basic.json", "test"); + IndexResolution languageIndex = loadMapping("mapping-languages.json", "languages"); + var enrichPolicy = new EnrichPolicy("match", null, List.of("unused"), "language_code", List.of("language_code", "language_name")); + EnrichResolution enrichResolution = new EnrichResolution( + Set.of(new EnrichPolicyResolution("languages", 
enrichPolicy, languageIndex)), + Set.of("languages") + ); + AnalyzerContext context = new AnalyzerContext(configuration(query), new EsqlFunctionRegistry(), testIndex, enrichResolution); + Analyzer analyzer = new Analyzer(context, TEST_VERIFIER); + LogicalPlan plan = analyze(query, analyzer); + var limit = as(plan, Limit.class); + assertThat(Expressions.names(limit.output()), contains("language_name", "language_code")); + } + public void testChainedEvalFieldsUse() { var query = "from test | eval x0 = pow(salary, 1), x1 = pow(x0, 2), x2 = pow(x1, 3)"; int additionalEvals = randomIntBetween(0, 5); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml new file mode 100644 index 0000000000000..5f1112197f383 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/62_extra_enrich.yml @@ -0,0 +1,70 @@ +--- +"Enrich fields includes match field": + - skip: + version: " - 8.11.99" + reason: "enrich match field was mistakenly excluded in 8.11" + - do: + indices.create: + index: departments + body: + mappings: + properties: + name: + type: keyword + employees: + type: integer + + - do: + bulk: + index: departments + refresh: true + body: + - { "index": { } } + - { "name": "engineering", "employees": 1024 } + - { "index": { } } + - { "name": "marketing", "employees": 56 } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: departments-policy + body: + match: + indices: [ "departments" ] + match_field: "name" + enrich_fields: [ "name", "employees" ] + + - do: + enrich.execute_policy: + name: departments-policy + - do: + esql.query: + body: + query: 'ROW name="engineering" | ENRICH departments-policy | LIMIT 10 | KEEP name, employees' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: { columns.1.name: 
"employees" } + - match: { columns.1.type: "integer" } + + - length: { values: 1 } + - match: { values.0.0: "engineering" } + - match: { values.0.1: 1024 } + + - do: + esql.query: + body: + query: 'ROW name="sales" | ENRICH departments-policy ON name WITH department=name | WHERE name==department | KEEP name, department | LIMIT 10' + + - match: { columns.0.name: "name" } + - match: { columns.0.type: "keyword" } + - match: { columns.1.name: "department" } + - match: { columns.1.type: "keyword" } + - length: { values: 0 } + + - do: + enrich.delete_policy: + name: departments-policy From 824d06c8cf7fd4811a0ff42e3554bc70dbfb7e0a Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 29 Nov 2023 20:21:07 -0800 Subject: [PATCH 089/263] Use random purpose in blob store repository tests (#102789) Today many blob store repository tests specify that the operations they perform have purpose `OperationPurpose#SNAPSHOT`, but most of these tests do not care about the purpose of these operations. This commit switches them to using a random purpose to highlight that the purpose is unimportant to the test. 
--- .../azure/AzureBlobStoreRepositoryTests.java | 25 ++++---- .../AzureStorageCleanupThirdPartyTests.java | 6 +- .../azure/AzureBlobContainerRetriesTests.java | 26 ++++---- .../azure/AzureSasTokenTests.java | 4 +- ...eCloudStorageBlobStoreRepositoryTests.java | 8 +-- ...CloudStorageBlobContainerRetriesTests.java | 14 ++--- ...leCloudStorageBlobStoreContainerTests.java | 4 +- .../s3/S3BlobStoreRepositoryTests.java | 6 +- .../s3/S3RepositoryThirdPartyTests.java | 12 ++-- .../s3/S3BlobContainerRetriesTests.java | 11 ++-- .../s3/S3BlobStoreContainerTests.java | 34 +++-------- .../s3/S3RetryingInputStreamTests.java | 6 +- .../url/AbstractURLBlobStoreTests.java | 8 +-- .../blobstore/url/FileURLBlobStoreTests.java | 4 +- .../blobstore/url/HttpURLBlobStoreTests.java | 13 ++-- .../hdfs/HdfsBlobStoreContainerTests.java | 26 ++++---- .../BlobStoreRepositoryCleanupIT.java | 6 +- .../blobstore/fs/FsBlobContainerTests.java | 45 ++++++-------- .../blobstore/BlobStoreRepositoryTests.java | 4 +- .../snapshots/BlobStoreFormatTests.java | 10 ++-- .../AbstractThirdPartyRepositoryTestCase.java | 42 ++++--------- .../AbstractBlobContainerRetriesTestCase.java | 26 ++++---- .../blobstore/BlobStoreTestUtil.java | 50 ++++++++-------- .../ESBlobStoreRepositoryIntegTestCase.java | 60 +++++++++---------- .../ESFsBasedRepositoryIntegTestCase.java | 4 +- 25 files changed, 196 insertions(+), 258 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 4cbf40849cbe9..f5c1912d15251 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -20,7 +20,6 @@ import 
org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -45,6 +44,7 @@ import java.util.function.Predicate; import java.util.regex.Pattern; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -235,11 +235,11 @@ public void testLargeBlobCountDeletion() throws Exception { for (int i = 0; i < numberOfBlobs; i++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); + container.writeBlob(randomPurpose(), blobName, new BytesArray(bytes), false); } - container.delete(OperationPurpose.SNAPSHOT); - assertThat(container.listBlobs(OperationPurpose.SNAPSHOT), is(anEmptyMap())); + container.delete(randomPurpose()); + assertThat(container.listBlobs(randomPurpose()), is(anEmptyMap())); } } @@ -250,7 +250,7 @@ public void testDeleteBlobsIgnoringIfNotExists() throws Exception { for (int i = 0; i < 10; i++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); + container.writeBlob(randomPurpose(), blobName, new BytesArray(bytes), false); blobsToDelete.add(blobName); } @@ -260,18 +260,15 @@ public void testDeleteBlobsIgnoringIfNotExists() throws Exception { } Randomness.shuffle(blobsToDelete); - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobsToDelete.iterator()); - 
assertThat(container.listBlobs(OperationPurpose.SNAPSHOT), is(anEmptyMap())); + container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobsToDelete.iterator()); + assertThat(container.listBlobs(randomPurpose()), is(anEmptyMap())); } } public void testNotFoundErrorMessageContainsFullKey() throws Exception { try (BlobStore store = newBlobStore()) { BlobContainer container = store.blobContainer(BlobPath.EMPTY.add("nested").add("dir")); - NoSuchFileException exception = expectThrows( - NoSuchFileException.class, - () -> container.readBlob(OperationPurpose.SNAPSHOT, "blob") - ); + NoSuchFileException exception = expectThrows(NoSuchFileException.class, () -> container.readBlob(randomPurpose(), "blob")); assertThat(exception.getMessage(), containsString("nested/dir/blob] not found")); } } @@ -281,10 +278,10 @@ public void testReadByteByByte() throws Exception { BlobContainer container = store.blobContainer(BlobPath.EMPTY.add(UUIDs.randomBase64UUID())); var data = randomBytes(randomIntBetween(128, 512)); String blobName = randomName(); - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new ByteArrayInputStream(data), data.length, true); + container.writeBlob(randomPurpose(), blobName, new ByteArrayInputStream(data), data.length, true); var originalDataInputStream = new ByteArrayInputStream(data); - try (var azureInputStream = container.readBlob(OperationPurpose.SNAPSHOT, blobName)) { + try (var azureInputStream = container.readBlob(randomPurpose(), blobName)) { for (int i = 0; i < data.length; i++) { assertThat(originalDataInputStream.read(), is(equalTo(azureInputStream.read()))); } @@ -292,7 +289,7 @@ public void testReadByteByByte() throws Exception { assertThat(azureInputStream.read(), is(equalTo(-1))); assertThat(originalDataInputStream.read(), is(equalTo(-1))); } - container.delete(OperationPurpose.SNAPSHOT); + container.delete(randomPurpose()); } } } diff --git 
a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 64f20453e1cee..052b558a05a38 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.Settings; @@ -36,6 +35,7 @@ import java.net.HttpURLConnection; import java.util.Collection; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.blankOrNullString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -140,13 +140,13 @@ public void testMultiBlockUpload() throws Exception { repo.threadPool().generic().execute(ActionRunnable.run(future, () -> { final BlobContainer blobContainer = repo.blobStore().blobContainer(repo.basePath().add("large_write")); blobContainer.writeBlob( - OperationPurpose.SNAPSHOT, + randomPurpose(), UUIDs.base64UUID(), new ByteArrayInputStream(randomByteArrayOfLength(blobSize)), blobSize, false ); - blobContainer.delete(OperationPurpose.SNAPSHOT); + blobContainer.delete(randomPurpose()); })); future.get(); } diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java 
b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java index f5c1d0b8ac00b..3cc56c949e852 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; @@ -43,6 +42,7 @@ import java.util.stream.Collectors; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.randomBytes; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -61,11 +61,11 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { final BlobContainer blobContainer = createBlobContainer(between(1, 5)); final Exception exception = expectThrows(NoSuchFileException.class, () -> { if (randomBoolean()) { - blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob"); + blobContainer.readBlob(randomPurpose(), "read_nonexistent_blob"); } else { final long position = randomLongBetween(0, MAX_RANGE_VAL - 1L); final long length = randomLongBetween(1, MAX_RANGE_VAL - position); - blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", position, length); + blobContainer.readBlob(randomPurpose(), "read_nonexistent_blob", position, length); } }); assertThat(exception.toString(), exception.getMessage().toLowerCase(Locale.ROOT), 
containsString("not found")); @@ -112,7 +112,7 @@ public void testReadBlobWithRetries() throws Exception { }); final BlobContainer blobContainer = createBlobContainer(maxRetries); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); assertThat(countDownHead.isCountedDown(), is(true)); assertThat(countDownGet.isCountedDown(), is(true)); @@ -160,7 +160,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(1, bytes.length - position); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_range_blob_max_retries", position, length)) { final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); assertArrayEquals(Arrays.copyOfRange(bytes, position, Math.min(bytes.length, position + length)), bytesRead); assertThat(countDownGet.isCountedDown(), is(true)); @@ -203,7 +203,7 @@ public void testWriteBlobWithRetries() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(randomPurpose(), "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -273,7 +273,7 @@ public void testWriteLargeBlob() throws Exception { final BlobContainer blobContainer = 
createBlobContainer(maxRetries); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", stream, data.length, false); + blobContainer.writeBlob(randomPurpose(), "write_large_blob", stream, data.length, false); } assertThat(countDownUploads.get(), equalTo(0)); @@ -341,7 +341,7 @@ public void testWriteLargeBlobStreaming() throws Exception { }); final BlobContainer blobContainer = createBlobContainer(maxRetries); - blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(randomPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { int outstanding = data.length; while (outstanding > 0) { if (randomBoolean()) { @@ -391,13 +391,7 @@ public void reset() {} }) { final IOException ioe = expectThrows( IOException.class, - () -> blobContainer.writeBlob( - OperationPurpose.SNAPSHOT, - "write_blob_max_retries", - stream, - randomIntBetween(1, 128), - randomBoolean() - ) + () -> blobContainer.writeBlob(randomPurpose(), "write_blob_max_retries", stream, randomIntBetween(1, 128), randomBoolean()) ); assertThat(ioe.getMessage(), is("Unable to write blob write_blob_max_retries")); // The mock http server uses 1 thread to process the requests, it's possible that the @@ -471,7 +465,7 @@ public void testRetryFromSecondaryLocationPolicies() throws Exception { } final BlobContainer blobContainer = createBlobContainer(maxRetries, secondaryHost, locationMode); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_from_secondary")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_from_secondary")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); // It does round robin, first tries on the primary, then on the secondary diff --git 
a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java index cfc4e17949771..ec74918f601cc 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSasTokenTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.MockSecureSettings; @@ -24,6 +23,7 @@ import static org.elasticsearch.repositories.azure.AzureStorageSettings.ACCOUNT_SETTING; import static org.elasticsearch.repositories.azure.AzureStorageSettings.SAS_TOKEN_SETTING; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; @@ -77,7 +77,7 @@ public void testSasTokenIsUsedAsProvidedInSettings() throws Exception { }); final BlobContainer blobContainer = createBlobContainer(maxRetries, null, LocationMode.PRIMARY_ONLY, clientName, secureSettings); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "sas_test")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "sas_test")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); } } diff --git a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 
b0eafb3bc37ab..87449d7153057 100644 --- a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.Streams; @@ -59,6 +58,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.TOKEN_URI_SETTING; @@ -132,7 +132,7 @@ public void testDeleteSingleItem() { f, () -> repository.blobStore() .blobContainer(repository.basePath()) - .deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, Iterators.single("foo")) + .deleteBlobsIgnoringIfNotExists(randomPurpose(), Iterators.single("foo")) ) ) ); @@ -198,7 +198,7 @@ public void testWriteReadLarge() throws IOException { random().nextBytes(data); writeBlob(container, "foobar", new BytesArray(data), false); } - try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, "foobar")) { + try (InputStream stream = container.readBlob(randomPurpose(), "foobar")) { BytesRefBuilder target = new BytesRefBuilder(); while (target.length() < data.length) { byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; @@ -209,7 +209,7 @@ 
public void testWriteReadLarge() throws IOException { assertEquals(data.length, target.length()); assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); } - container.delete(OperationPurpose.SNAPSHOT); + container.delete(randomPurpose()); } } diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java index d23d9385ab1a2..673499e4b2461 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -64,6 +63,7 @@ import static fixture.gcs.GoogleCloudStorageHttpHandler.parseMultipartRequestBody; import static fixture.gcs.TestUtils.createServiceAccount; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.randomBytes; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageBlobStore.MAX_DELETES_PER_BATCH; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; @@ -188,7 +188,7 @@ public void testReadLargeBlobWithRetries() throws Exception { exchange.close(); }); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, 
"large_blob_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "large_blob_retries")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); } } @@ -231,7 +231,7 @@ public void testWriteBlobWithRetries() throws Exception { })); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(randomPurpose(), "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -254,7 +254,7 @@ public void testWriteBlobWithReadTimeouts() { Exception exception = expectThrows(StorageException.class, () -> { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_timeout", stream, bytes.length, false); + blobContainer.writeBlob(randomPurpose(), "write_blob_timeout", stream, bytes.length, false); } }); assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); @@ -392,10 +392,10 @@ public void testWriteLargeBlob() throws IOException { if (randomBoolean()) { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", data), data.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", stream, data.length, false); + blobContainer.writeBlob(randomPurpose(), "write_large_blob", stream, data.length, false); } } else { - blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob", false, randomBoolean(), out -> out.write(data)); + blobContainer.writeMetadataBlob(randomPurpose(), "write_large_blob", false, randomBoolean(), out -> out.write(data)); } assertThat(countInits.get(), equalTo(0)); @@ -452,7 +452,7 @@ public String next() { exchange.getResponseBody().write(response); 
})); - blobContainer.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNamesIterator); + blobContainer.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobNamesIterator); // Ensure that the remaining deletes are sent in the last batch if (pendingDeletes.get() > 0) { diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index e38347ad30292..5a950ad2a9ecc 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; @@ -27,6 +26,7 @@ import java.util.Arrays; import java.util.List; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -93,7 +93,7 @@ public void testDeleteBlobsIgnoringIfNotExistsThrowsIOException() throws Excepti IOException e = expectThrows( IOException.class, - () -> container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobs.iterator()) + () -> container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobs.iterator()) ); assertThat(e.getCause(), instanceOf(StorageException.class)); } diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java 
b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 7f46440647a54..c0d2f9e1ed6f9 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.RepositoryStats; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotId; @@ -78,6 +79,7 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.repositories.RepositoriesModule.METRIC_REQUESTS_COUNT; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.allOf; @@ -317,7 +319,7 @@ public void testRequestStatsWithOperationPurposes() throws IOException { assertThat(initialStats.keySet(), equalTo(allOperations)); // Collect more stats with an operation purpose other than the default - final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT, () -> randomFrom(OperationPurpose.values())); + final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT, BlobStoreTestUtil::randomPurpose); final BlobPath blobPath = repository.basePath().add(randomAlphaOfLength(10)); final BlobContainer blobContainer = blobStore.blobContainer(blobPath); final BytesArray whatToWrite = new 
BytesArray(randomByteArrayOfLength(randomIntBetween(100, 1000))); @@ -394,7 +396,7 @@ public void testEnforcedCooldownPeriod() throws IOException { () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlobAtomic( - OperationPurpose.SNAPSHOT, + randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + modifiedRepositoryData.getGenId(), serialized, true diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index b11120e068d14..1e2ff831b8e49 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -46,6 +45,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.blankOrNullString; import static org.hamcrest.Matchers.equalTo; @@ -161,7 +161,7 @@ public long absoluteTimeInMillis() { class TestHarness { boolean tryCompareAndSet(BytesReference expected, BytesReference updated) { return PlainActionFuture.get( - future -> blobContainer.compareAndSetRegister(OperationPurpose.SNAPSHOT, "key", expected, updated, future), + future -> 
blobContainer.compareAndSetRegister(randomPurpose(), "key", expected, updated, future), 10, TimeUnit.SECONDS ); @@ -169,11 +169,7 @@ boolean tryCompareAndSet(BytesReference expected, BytesReference updated) { BytesReference readRegister() { return PlainActionFuture.get( - future -> blobContainer.getRegister( - OperationPurpose.SNAPSHOT, - "key", - future.map(OptionalBytesReference::bytesReference) - ), + future -> blobContainer.getRegister(randomPurpose(), "key", future.map(OptionalBytesReference::bytesReference)), 10, TimeUnit.SECONDS ); @@ -220,7 +216,7 @@ List listMultipartUploads() { assertThat(testHarness.listMultipartUploads(), hasSize(0)); assertEquals(bytes2, testHarness.readRegister()); } finally { - blobContainer.delete(OperationPurpose.SNAPSHOT); + blobContainer.delete(randomPurpose()); } } finally { ThreadPool.terminate(threadpool, 10, TimeUnit.SECONDS); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 3875181f98ece..a8a6d71928795 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -55,6 +55,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.s3.S3ClientSettings.DISABLE_CHUNKED_ENCODING; import static org.elasticsearch.repositories.s3.S3ClientSettings.ENDPOINT_SETTING; import static org.elasticsearch.repositories.s3.S3ClientSettings.MAX_RETRIES_SETTING; @@ -216,7 +217,7 @@ public void testWriteBlobWithRetries() throws Exception { } }); try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), 
bytes.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_max_retries", stream, bytes.length, false); + blobContainer.writeBlob(randomPurpose(), "write_blob_max_retries", stream, bytes.length, false); } assertThat(countDown.isCountedDown(), is(true)); } @@ -239,7 +240,7 @@ public void testWriteBlobWithReadTimeouts() { Exception exception = expectThrows(IOException.class, () -> { try (InputStream stream = new InputStreamIndexInput(new ByteArrayIndexInput("desc", bytes), bytes.length)) { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_blob_timeout", stream, bytes.length, false); + blobContainer.writeBlob(randomPurpose(), "write_blob_timeout", stream, bytes.length, false); } }); assertThat( @@ -345,7 +346,7 @@ public void testWriteLargeBlob() throws Exception { } }); - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, "write_large_blob", new ZeroInputStream(blobSize), blobSize, false); + blobContainer.writeBlob(randomPurpose(), "write_large_blob", new ZeroInputStream(blobSize), blobSize, false); assertThat(countDownInitiate.isCountedDown(), is(true)); assertThat(countDownUploads.get(), equalTo(0)); @@ -443,7 +444,7 @@ public void testWriteLargeBlobStreaming() throws Exception { } }); - blobContainer.writeMetadataBlob(OperationPurpose.SNAPSHOT, "write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(randomPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { final byte[] buffer = new byte[16 * 1024]; long outstanding = blobSize; while (outstanding > 0) { @@ -518,7 +519,7 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_max_retries"), new FlakyReadHandler()); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { final int 
readLimit; final InputStream wrappedStream; if (randomBoolean()) { diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index 9ae2589759d3f..fbbcfa475da44 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStoreException; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; @@ -40,6 +39,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; @@ -59,7 +59,7 @@ public void testExecuteSingleUploadBlobSizeTooLarge() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeSingleUpload(OperationPurpose.SNAPSHOT, blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + () -> blobContainer.executeSingleUpload(randomPurpose(), blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); } @@ -74,7 +74,7 @@ public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> blobContainer.executeSingleUpload( - OperationPurpose.SNAPSHOT, + randomPurpose(), blobStore, blobName, new ByteArrayInputStream(new 
byte[0]), @@ -121,7 +121,7 @@ public void testExecuteSingleUpload() throws IOException { when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[blobSize]); - blobContainer.executeSingleUpload(OperationPurpose.SNAPSHOT, blobStore, blobName, inputStream, blobSize); + blobContainer.executeSingleUpload(randomPurpose(), blobStore, blobName, inputStream, blobSize); final PutObjectRequest request = argumentCaptor.getValue(); assertEquals(bucketName, request.getBucketName()); @@ -142,13 +142,7 @@ public void testExecuteMultipartUploadBlobSizeTooLarge() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeMultipartUpload( - OperationPurpose.SNAPSHOT, - blobStore, - randomAlphaOfLengthBetween(1, 10), - null, - blobSize - ) + () -> blobContainer.executeMultipartUpload(randomPurpose(), blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); } @@ -160,13 +154,7 @@ public void testExecuteMultipartUploadBlobSizeTooSmall() { final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> blobContainer.executeMultipartUpload( - OperationPurpose.SNAPSHOT, - blobStore, - randomAlphaOfLengthBetween(1, 10), - null, - blobSize - ) + () -> blobContainer.executeMultipartUpload(randomPurpose(), blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be smaller than 5mb", e.getMessage()); } @@ -230,7 +218,7 @@ public void testExecuteMultipartUpload() throws IOException { final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[0]); final S3BlobContainer blobContainer = new S3BlobContainer(blobPath, blobStore); - blobContainer.executeMultipartUpload(OperationPurpose.SNAPSHOT, blobStore, blobName, 
inputStream, blobSize); + blobContainer.executeMultipartUpload(randomPurpose(), blobStore, blobName, inputStream, blobSize); final InitiateMultipartUploadRequest initRequest = initArgCaptor.getValue(); assertEquals(bucketName, initRequest.getBucketName()); @@ -336,13 +324,7 @@ public void testExecuteMultipartUploadAborted() { final IOException e = expectThrows(IOException.class, () -> { final S3BlobContainer blobContainer = new S3BlobContainer(BlobPath.EMPTY, blobStore); - blobContainer.executeMultipartUpload( - OperationPurpose.SNAPSHOT, - blobStore, - blobName, - new ByteArrayInputStream(new byte[0]), - blobSize - ); + blobContainer.executeMultipartUpload(randomPurpose(), blobStore, blobName, new ByteArrayInputStream(new byte[0]), blobSize); }); assertEquals("Unable to upload object [" + blobName + "] using multipart upload", e.getMessage()); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java index d8366236a8184..f43fb8cfa4ed3 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RetryingInputStreamTests.java @@ -14,7 +14,6 @@ import com.amazonaws.services.s3.model.S3ObjectInputStream; import org.apache.http.client.methods.HttpGet; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.io.Streams; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; @@ -23,6 +22,7 @@ import java.io.IOException; import java.util.Arrays; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -94,11 +94,11 @@ private S3RetryingInputStream 
createInputStream(final byte[] data, @Nullable fin if (position != null && length != null) { s3Object.getObjectMetadata().setContentLength(length); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data, position, length), new HttpGet())); - return new S3RetryingInputStream(OperationPurpose.SNAPSHOT, blobStore, "_blob", position, Math.addExact(position, length - 1)); + return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob", position, Math.addExact(position, length - 1)); } else { s3Object.getObjectMetadata().setContentLength(data.length); s3Object.setObjectContent(new S3ObjectInputStream(new ByteArrayInputStream(data), new HttpGet())); - return new S3RetryingInputStream(OperationPurpose.SNAPSHOT, blobStore, "_blob"); + return new S3RetryingInputStream(randomPurpose(), blobStore, "_blob"); } } } diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java index 92cb0c1cf75a2..132760c8b410b 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/AbstractURLBlobStoreTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -21,6 +20,7 @@ import java.io.InputStream; import java.nio.file.NoSuchFileException; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.core.IsEqual.equalTo; public abstract class AbstractURLBlobStoreTests extends ESTestCase { @@ -34,7 +34,7 @@ public void 
testURLBlobStoreCanReadBlob() throws IOException { BytesArray data = getOriginalData(); String blobName = getBlobName(); BlobContainer container = getBlobContainer(); - try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName)) { + try (InputStream stream = container.readBlob(randomPurpose(), blobName)) { BytesReference bytesRead = Streams.readFully(stream); assertThat(data, equalTo(bytesRead)); } @@ -46,7 +46,7 @@ public void testURLBlobStoreCanReadBlobRange() throws IOException { BlobContainer container = getBlobContainer(); int position = randomIntBetween(0, data.length() - 1); int length = randomIntBetween(1, data.length() - position); - try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName, position, length)) { + try (InputStream stream = container.readBlob(randomPurpose(), blobName, position, length)) { BytesReference bytesRead = Streams.readFully(stream); assertThat(data.slice(position, length), equalTo(bytesRead)); } @@ -55,7 +55,7 @@ public void testURLBlobStoreCanReadBlobRange() throws IOException { public void testNoBlobFound() throws IOException { BlobContainer container = getBlobContainer(); String incorrectBlobName = UUIDs.base64UUID(); - try (InputStream ignored = container.readBlob(OperationPurpose.SNAPSHOT, incorrectBlobName)) { + try (InputStream ignored = container.readBlob(randomPurpose(), incorrectBlobName)) { ignored.read(); fail("Should have thrown NoSuchFileException exception"); } catch (NoSuchFileException e) { diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java index 7bc793415c63e..6254ec1be6332 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/FileURLBlobStoreTests.java @@ 
-10,7 +10,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.blobstore.url.http.URLHttpClientSettings; import org.elasticsearch.common.bytes.BytesArray; @@ -21,6 +20,7 @@ import java.nio.file.Files; import java.nio.file.Path; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.mockito.Mockito.mock; public class FileURLBlobStoreTests extends AbstractURLBlobStoreTests { @@ -60,6 +60,6 @@ String getBlobName() { @Override public void testURLBlobStoreCanReadBlobRange() throws IOException { - expectThrows(UnsupportedOperationException.class, () -> getBlobContainer().readBlob(OperationPurpose.SNAPSHOT, "test", 0, 12)); + expectThrows(UnsupportedOperationException.class, () -> getBlobContainer().readBlob(randomPurpose(), "test", 0, 12)); } } diff --git a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java index f8d55ecab6ab8..b5be5cdbbe3d9 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/common/blobstore/url/HttpURLBlobStoreTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; import org.elasticsearch.common.blobstore.url.http.URLHttpClientSettings; import org.elasticsearch.common.bytes.BytesArray; @@ -36,6 +35,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static 
org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; + @SuppressForbidden(reason = "use http server") public class HttpURLBlobStoreTests extends AbstractURLBlobStoreTests { private static final Pattern RANGE_PATTERN = Pattern.compile("bytes=(\\d+)-(\\d+)$"); @@ -127,14 +128,8 @@ String getBlobName() { public void testRangeReadOutsideOfLegalRange() { BlobContainer container = getBlobContainer(); - expectThrows( - IllegalArgumentException.class, - () -> container.readBlob(OperationPurpose.SNAPSHOT, blobName, -1, content.length).read() - ); - expectThrows( - IOException.class, - () -> container.readBlob(OperationPurpose.SNAPSHOT, blobName, content.length + 1, content.length).read() - ); + expectThrows(IllegalArgumentException.class, () -> container.readBlob(randomPurpose(), blobName, -1, content.length).read()); + expectThrows(IOException.class, () -> container.readBlob(randomPurpose(), blobName, content.length + 1, content.length).read()); } private String getEndpointForServer() { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index 6d7aca0ca1d56..592192f29c262 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -20,7 +20,6 @@ import org.apache.hadoop.util.Progressable; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; @@ -44,6 +43,7 @@ import javax.security.auth.Subject; +import static 
org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.randomBytes; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.readBlobFully; import static org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase.writeBlob; @@ -131,7 +131,7 @@ public void testReadOnly() throws Exception { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "foo", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "foo", data.length), data); - assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); + assertTrue(container.blobExists(randomPurpose(), "foo")); } public void testReadRange() throws Exception { @@ -162,7 +162,7 @@ public void testReadRange() throws Exception { int pos = randomIntBetween(0, data.length / 2); int len = randomIntBetween(pos, data.length) - pos; assertArrayEquals(readBlobPartially(container, "foo", pos, len), Arrays.copyOfRange(data, pos, pos + len)); - assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); + assertTrue(container.blobExists(randomPurpose(), "foo")); } public void testReplicationFactor() throws Exception { @@ -209,24 +209,24 @@ public void testListBlobsByPrefix() throws Exception { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "foo", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "foo", data.length), data); - assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "foo")); + assertTrue(container.blobExists(randomPurpose(), "foo")); writeBlob(container, "bar", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "bar", data.length), data); - assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "bar")); + 
assertTrue(container.blobExists(randomPurpose(), "bar")); - assertEquals(2, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, null).size()); - assertEquals(1, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "fo").size()); - assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "noSuchFile").size()); + assertEquals(2, container.listBlobsByPrefix(randomPurpose(), null).size()); + assertEquals(1, container.listBlobsByPrefix(randomPurpose(), "fo").size()); + assertEquals(0, container.listBlobsByPrefix(randomPurpose(), "noSuchFile").size()); - container.delete(OperationPurpose.SNAPSHOT); - assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, null).size()); - assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "fo").size()); - assertEquals(0, container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "noSuchFile").size()); + container.delete(randomPurpose()); + assertEquals(0, container.listBlobsByPrefix(randomPurpose(), null).size()); + assertEquals(0, container.listBlobsByPrefix(randomPurpose(), "fo").size()); + assertEquals(0, container.listBlobsByPrefix(randomPurpose(), "noSuchFile").size()); } public static byte[] readBlobPartially(BlobContainer container, String name, int pos, int length) throws IOException { byte[] data = new byte[length]; - try (InputStream inputStream = container.readBlob(OperationPurpose.SNAPSHOT, name, pos, length)) { + try (InputStream inputStream = container.readBlob(randomPurpose(), name, pos, length)) { assertThat(Streams.readFully(inputStream, data), CoreMatchers.equalTo(length)); assertThat(inputStream.read(), CoreMatchers.equalTo(-1)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java index 0b1802fc71470..7886e628b26ad 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.RepositoryCleanupInProgress; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; @@ -24,6 +23,7 @@ import java.io.IOException; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFutureThrows; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -98,7 +98,7 @@ private ActionFuture startBlockedCleanup(String repoN garbageFuture, () -> repository.blobStore() .blobContainer(repository.basePath()) - .writeBlob(OperationPurpose.SNAPSHOT, "snap-foo.dat", new BytesArray(new byte[1]), true) + .writeBlob(randomPurpose(), "snap-foo.dat", new BytesArray(new byte[1]), true) ) ); garbageFuture.get(); @@ -147,7 +147,7 @@ public void testCleanupOldIndexN() throws ExecutionException, InterruptedExcepti () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlob( - OperationPurpose.SNAPSHOT, + randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + generation, new BytesArray(new byte[1]), true diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java index bb4aefc0388e6..1f54046630cf8 100644 --- 
a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java +++ b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.OptionalBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -47,6 +46,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -87,7 +87,7 @@ public void testReadBlobRangeCorrectlySkipBytes() throws IOException { final long start = randomLongBetween(0L, Math.max(0L, blobData.length - 1)); final long length = randomLongBetween(1L, blobData.length - start); - try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, blobName, start, length)) { + try (InputStream stream = container.readBlob(randomPurpose(), blobName, start, length)) { assertThat(totalBytesRead.get(), equalTo(0L)); assertThat(Streams.consumeFully(stream), equalTo(length)); assertThat(totalBytesRead.get(), equalTo(length)); @@ -119,11 +119,11 @@ public void testDeleteIgnoringIfNotExistsDoesNotThrowFileNotFound() throws IOExc path ); - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, List.of(blobName).listIterator()); + container.deleteBlobsIgnoringIfNotExists(randomPurpose(), List.of(blobName).listIterator()); // Should not throw exception - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, List.of(blobName).listIterator()); + 
container.deleteBlobsIgnoringIfNotExists(randomPurpose(), List.of(blobName).listIterator()); - assertFalse(container.blobExists(OperationPurpose.SNAPSHOT, blobName)); + assertFalse(container.blobExists(randomPurpose(), blobName)); } private static BytesReference getBytesAsync(Consumer> consumer) { @@ -150,11 +150,11 @@ public void testCompareAndExchange() throws Exception { for (int i = 0; i < 5; i++) { switch (between(1, 4)) { - case 1 -> assertEquals(expectedValue.get(), getBytesAsync(l -> container.getRegister(OperationPurpose.SNAPSHOT, key, l))); + case 1 -> assertEquals(expectedValue.get(), getBytesAsync(l -> container.getRegister(randomPurpose(), key, l))); case 2 -> assertFalse( getAsync( l -> container.compareAndSetRegister( - OperationPurpose.SNAPSHOT, + randomPurpose(), key, randomValueOtherThan(expectedValue.get(), () -> new BytesArray(randomByteArrayOfLength(8))), new BytesArray(randomByteArrayOfLength(8)), @@ -166,7 +166,7 @@ public void testCompareAndExchange() throws Exception { expectedValue.get(), getBytesAsync( l -> container.compareAndExchangeRegister( - OperationPurpose.SNAPSHOT, + randomPurpose(), key, randomValueOtherThan(expectedValue.get(), () -> new BytesArray(randomByteArrayOfLength(8))), new BytesArray(randomByteArrayOfLength(8)), @@ -181,26 +181,20 @@ public void testCompareAndExchange() throws Exception { final var newValue = new BytesArray(randomByteArrayOfLength(8)); if (randomBoolean()) { - assertTrue( - getAsync(l -> container.compareAndSetRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), newValue, l)) - ); + assertTrue(getAsync(l -> container.compareAndSetRegister(randomPurpose(), key, expectedValue.get(), newValue, l))); } else { assertEquals( expectedValue.get(), - getBytesAsync( - l -> container.compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), newValue, l) - ) + getBytesAsync(l -> container.compareAndExchangeRegister(randomPurpose(), key, expectedValue.get(), newValue, l)) ); } 
expectedValue.set(newValue); } - container.writeBlob(OperationPurpose.SNAPSHOT, key, new BytesArray(new byte[17]), false); + container.writeBlob(randomPurpose(), key, new BytesArray(new byte[17]), false); expectThrows( IllegalStateException.class, - () -> getBytesAsync( - l -> container.compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, expectedValue.get(), BytesArray.EMPTY, l) - ) + () -> getBytesAsync(l -> container.compareAndExchangeRegister(randomPurpose(), key, expectedValue.get(), BytesArray.EMPTY, l)) ); } @@ -234,25 +228,20 @@ private static void checkAtomicWrite() throws IOException { BlobPath.EMPTY, path ); - container.writeBlobAtomic( - OperationPurpose.SNAPSHOT, - blobName, - new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), - true - ); + container.writeBlobAtomic(randomPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true); final var blobData = new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))); - container.writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, blobData, false); - assertEquals(blobData, Streams.readFully(container.readBlob(OperationPurpose.SNAPSHOT, blobName))); + container.writeBlobAtomic(randomPurpose(), blobName, blobData, false); + assertEquals(blobData, Streams.readFully(container.readBlob(randomPurpose(), blobName))); expectThrows( FileAlreadyExistsException.class, () -> container.writeBlobAtomic( - OperationPurpose.SNAPSHOT, + randomPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true ) ); - for (String blob : container.listBlobs(OperationPurpose.SNAPSHOT).keySet()) { + for (String blob : container.listBlobs(randomPurpose()).keySet()) { assertFalse("unexpected temp blob [" + blob + "]", FsBlobContainer.isTempBlobName(blob)); } } diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java 
b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 9dd4fa4bfb84f..ef625706ffffe 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -68,6 +67,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -204,7 +204,7 @@ public void testCorruptIndexLatestFile() throws Exception { for (int i = 0; i < 16; i++) { repository.blobContainer() - .writeBlob(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); + .writeBlob(randomPurpose(), BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); if (i == 8) { assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(generation)); } else { diff --git a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java index f9ec4786d2854..7fc8b689e9ae9 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatTests.java @@ -13,7 +13,6 @@ import 
org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; @@ -32,6 +31,7 @@ import java.util.Map; import java.util.function.Function; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.greaterThan; public class BlobStoreFormatTests extends ESTestCase { @@ -114,7 +114,7 @@ public void testCompressionIsApplied() throws IOException { BlobObj blobObj = new BlobObj(veryRedundantText.toString()); checksumFormat.write(blobObj, blobContainer, "blob-comp", true); checksumFormat.write(blobObj, blobContainer, "blob-not-comp", false); - Map blobs = blobContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "blob-"); + Map blobs = blobContainer.listBlobsByPrefix(randomPurpose(), "blob-"); assertEquals(blobs.size(), 2); assertThat(blobs.get("blob-not-comp").length(), greaterThan(blobs.get("blob-comp").length())); } @@ -147,8 +147,8 @@ protected BlobStore createTestBlobStore() throws IOException { } protected void randomCorruption(BlobContainer blobContainer, String blobName) throws IOException { - final byte[] buffer = new byte[(int) blobContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, blobName).get(blobName).length()]; - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, blobName)) { + final byte[] buffer = new byte[(int) blobContainer.listBlobsByPrefix(randomPurpose(), blobName).get(blobName).length()]; + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), blobName)) { Streams.readFully(inputStream, buffer); } final BytesArray corruptedBytes; @@ -164,7 +164,7 @@ protected void randomCorruption(BlobContainer blobContainer, 
String blobName) th // another sequence of 8 zero bytes anywhere in the file, let alone such a sequence followed by a correct checksum. corruptedBytes = new BytesArray(buffer, 0, location); } - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, blobName, corruptedBytes, false); + blobContainer.writeBlob(randomPurpose(), blobName, corruptedBytes, false); } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java index 2c91addd0839b..15f33131fa114 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -37,6 +36,7 @@ import java.util.Set; import java.util.concurrent.Executor; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -75,9 +75,7 @@ public void tearDown() throws Exception { private void deleteAndAssertEmpty(BlobPath path) { final BlobStoreRepository repo = getRepository(); final PlainActionFuture future = new PlainActionFuture<>(); - repo.threadPool() - .generic() - .execute(ActionRunnable.run(future, () -> repo.blobStore().blobContainer(path).delete(OperationPurpose.SNAPSHOT))); + 
repo.threadPool().generic().execute(ActionRunnable.run(future, () -> repo.blobStore().blobContainer(path).delete(randomPurpose()))); future.actionGet(); final BlobPath parent = path.parent(); if (parent == null) { @@ -131,28 +129,16 @@ public void testListChildren() throws Exception { final BlobStore blobStore = repo.blobStore(); blobStore.blobContainer(repo.basePath().add("foo")) .writeBlob( - OperationPurpose.SNAPSHOT, + randomPurpose(), "nested-blob", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false ); blobStore.blobContainer(repo.basePath().add("foo").add("nested")) - .writeBlob( - OperationPurpose.SNAPSHOT, - "bar", - new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), - testBlobLen, - false - ); + .writeBlob(randomPurpose(), "bar", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); blobStore.blobContainer(repo.basePath().add("foo").add("nested2")) - .writeBlob( - OperationPurpose.SNAPSHOT, - "blub", - new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), - testBlobLen, - false - ); + .writeBlob(randomPurpose(), "blub", new ByteArrayInputStream(randomByteArrayOfLength(testBlobLen)), testBlobLen, false); })); future.actionGet(); assertChildren(repo.basePath(), Collections.singleton("foo")); @@ -265,7 +251,7 @@ private static BytesReference readIndexLatest(BlobStoreRepository repository) th repository.blobStore() .blobContainer(repository.basePath()) .readBlob( - OperationPurpose.SNAPSHOT, + randomPurpose(), // Deliberately not using BlobStoreRepository#INDEX_LATEST_BLOB here, it's important for external systems that a // blob with literally this name is updated on each write: "index.latest" @@ -286,10 +272,10 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex genericExec.execute(ActionRunnable.run(future, () -> { final BlobStore blobStore = repo.blobStore(); blobStore.blobContainer(repo.basePath().add("indices").add("foo")) - 
.writeBlob(OperationPurpose.SNAPSHOT, "bar", new ByteArrayInputStream(new byte[3]), 3, false); + .writeBlob(randomPurpose(), "bar", new ByteArrayInputStream(new byte[3]), 3, false); for (String prefix : Arrays.asList("snap-", "meta-")) { blobStore.blobContainer(repo.basePath()) - .writeBlob(OperationPurpose.SNAPSHOT, prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); + .writeBlob(randomPurpose(), prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); } })); future.get(); @@ -297,10 +283,10 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex final PlainActionFuture corruptionFuture = new PlainActionFuture<>(); genericExec.execute(ActionRunnable.supply(corruptionFuture, () -> { final BlobStore blobStore = repo.blobStore(); - return blobStore.blobContainer(repo.basePath().add("indices")).children(OperationPurpose.SNAPSHOT).containsKey("foo") - && blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists(OperationPurpose.SNAPSHOT, "bar") - && blobStore.blobContainer(repo.basePath()).blobExists(OperationPurpose.SNAPSHOT, "meta-foo.dat") - && blobStore.blobContainer(repo.basePath()).blobExists(OperationPurpose.SNAPSHOT, "snap-foo.dat"); + return blobStore.blobContainer(repo.basePath().add("indices")).children(randomPurpose()).containsKey("foo") + && blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists(randomPurpose(), "bar") + && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "meta-foo.dat") + && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "snap-foo.dat"); })); assertTrue(corruptionFuture.get()); } @@ -320,9 +306,7 @@ private Set listChildren(BlobPath path) { final BlobStoreRepository repository = getRepository(); repository.threadPool() .generic() - .execute( - ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).children(OperationPurpose.SNAPSHOT).keySet()) - ); + 
.execute(ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).children(randomPurpose()).keySet())); return future.actionGet(); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index e23b26c73a811..4110472e8ef76 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -14,7 +14,6 @@ import org.apache.http.ConnectionClosedException; import org.apache.http.HttpStatus; import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeValue; @@ -42,6 +41,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; @@ -94,9 +94,9 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { final int length = randomIntBetween(1, Math.toIntExact(Math.min(Integer.MAX_VALUE, MAX_RANGE_VAL - position))); final Exception exception = expectThrows(NoSuchFileException.class, () -> { if (randomBoolean()) { - Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob")); + Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_nonexistent_blob")); } else { - Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", 0, 1)); + 
Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_nonexistent_blob", 0, 1)); } }); final String fullBlobPath = blobContainer.path().buildAsString() + "read_nonexistent_blob"; @@ -104,7 +104,7 @@ public void testReadNonexistentBlobThrowsNoSuchFileException() { assertThat( expectThrows( NoSuchFileException.class, - () -> Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_nonexistent_blob", position, length)) + () -> Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_nonexistent_blob", position, length)) ).getMessage().toLowerCase(Locale.ROOT), containsString("blob object [" + fullBlobPath + "] not found") ); @@ -146,7 +146,7 @@ public void testReadBlobWithRetries() throws Exception { } }); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -212,7 +212,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(0, randomBoolean() ? 
bytes.length : Integer.MAX_VALUE); - try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_range_blob_max_retries", position, length)) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -252,7 +252,7 @@ public void testReadBlobWithReadTimeouts() { Exception exception = expectThrows( unresponsiveExceptionType(), - () -> Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_unresponsive")) + () -> Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_unresponsive")) ); assertThat(exception.getMessage().toLowerCase(Locale.ROOT), containsString("read timed out")); assertThat(exception.getCause(), instanceOf(SocketTimeoutException.class)); @@ -269,8 +269,8 @@ public void testReadBlobWithReadTimeouts() { exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete") - : blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete", position, length) + ? 
blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") + : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", position, length) ) { Streams.readFully(stream); } @@ -298,9 +298,9 @@ public void testReadBlobWithNoHttpResponse() { Exception exception = expectThrows(unresponsiveExceptionType(), () -> { if (randomBoolean()) { - Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_no_response")); + Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_no_response")); } else { - Streams.readFully(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_no_response", 0, 1)); + Streams.readFully(blobContainer.readBlob(randomPurpose(), "read_blob_no_response", 0, 1)); } }); assertThat( @@ -323,8 +323,8 @@ public void testReadBlobWithPrematureConnectionClose() { final Exception exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete", 0, 1) - : blobContainer.readBlob(OperationPurpose.SNAPSHOT, "read_blob_incomplete") + ? 
blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", 0, 1) + : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") ) { Streams.readFully(stream); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index a7914899c4ce1..383c2b3c2d13b 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -65,6 +65,7 @@ import java.util.stream.Collectors; import static org.apache.lucene.tests.util.LuceneTestCase.random; +import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; @@ -104,7 +105,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore try { final BlobContainer blobContainer = repository.blobContainer(); final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(OperationPurpose.SNAPSHOT, "index.latest"))) { + try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(randomPurpose(), "index.latest"))) { latestGen = inputStream.readLong(); } catch (NoSuchFileException e) { throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); @@ -112,7 +113,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore assertIndexGenerations(blobContainer, latestGen); final RepositoryData repositoryData; try ( - InputStream blob = blobContainer.readBlob(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); + InputStream blob = blobContainer.readBlob(randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); XContentParser parser = 
XContentType.JSON.xContent() .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), blob) ) { @@ -153,7 +154,7 @@ public void onFailure(Exception e) { } private static void assertIndexGenerations(BlobContainer repoRoot, long latestGen) throws IOException { - final long[] indexGenerations = repoRoot.listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX) + final long[] indexGenerations = repoRoot.listBlobsByPrefix(randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX) .keySet() .stream() .map(s -> s.replace(BlobStoreRepository.INDEX_FILE_PREFIX, "")) @@ -165,12 +166,12 @@ private static void assertIndexGenerations(BlobContainer repoRoot, long latestGe } private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGenerations shardGenerations) throws IOException { - final BlobContainer indicesContainer = repoRoot.children(OperationPurpose.SNAPSHOT).get("indices"); + final BlobContainer indicesContainer = repoRoot.children(randomPurpose()).get("indices"); for (IndexId index : shardGenerations.indices()) { final List gens = shardGenerations.getGens(index); if (gens.isEmpty() == false) { - final BlobContainer indexContainer = indicesContainer.children(OperationPurpose.SNAPSHOT).get(index.getId()); - final Map shardContainers = indexContainer.children(OperationPurpose.SNAPSHOT); + final BlobContainer indexContainer = indicesContainer.children(randomPurpose()).get(index.getId()); + final Map shardContainers = indexContainer.children(randomPurpose()); for (int i = 0; i < gens.size(); i++) { final ShardGeneration generation = gens.get(i); assertThat(generation, not(ShardGenerations.DELETED_SHARD_GEN)); @@ -178,8 +179,7 @@ private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGen final String shardId = Integer.toString(i); assertThat(shardContainers, hasKey(shardId)); assertThat( - shardContainers.get(shardId) - 
.listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.INDEX_FILE_PREFIX), + shardContainers.get(shardId).listBlobsByPrefix(randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX), hasKey(BlobStoreRepository.INDEX_FILE_PREFIX + generation) ); } @@ -190,13 +190,13 @@ private static void assertShardIndexGenerations(BlobContainer repoRoot, ShardGen private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryData repositoryData) throws IOException { final List expectedIndexUUIDs = repositoryData.getIndices().values().stream().map(IndexId::getId).toList(); - final BlobContainer indicesContainer = repository.blobContainer().children(OperationPurpose.SNAPSHOT).get("indices"); + final BlobContainer indicesContainer = repository.blobContainer().children(randomPurpose()).get("indices"); final List foundIndexUUIDs; if (indicesContainer == null) { foundIndexUUIDs = Collections.emptyList(); } else { // Skip Lucene MockFS extraN directory - foundIndexUUIDs = indicesContainer.children(OperationPurpose.SNAPSHOT) + foundIndexUUIDs = indicesContainer.children(randomPurpose()) .keySet() .stream() .filter(s -> s.startsWith("extra") == false) @@ -204,9 +204,9 @@ private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryD } assertThat(foundIndexUUIDs, containsInAnyOrder(expectedIndexUUIDs.toArray(Strings.EMPTY_ARRAY))); for (String indexId : foundIndexUUIDs) { - final Set indexMetaGenerationsFound = indicesContainer.children(OperationPurpose.SNAPSHOT) + final Set indexMetaGenerationsFound = indicesContainer.children(randomPurpose()) .get(indexId) - .listBlobsByPrefix(OperationPurpose.SNAPSHOT, BlobStoreRepository.METADATA_PREFIX) + .listBlobsByPrefix(randomPurpose(), BlobStoreRepository.METADATA_PREFIX) .keySet() .stream() .map(p -> p.replace(BlobStoreRepository.METADATA_PREFIX, "").replace(".dat", "")) @@ -231,7 +231,7 @@ private static void assertSnapshotUUIDs( final Collection snapshotIds = repositoryData.getSnapshotIds(); final List 
expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).toList(); for (String prefix : new String[] { BlobStoreRepository.SNAPSHOT_PREFIX, BlobStoreRepository.METADATA_PREFIX }) { - final Collection foundSnapshotUUIDs = repoRoot.listBlobs(OperationPurpose.SNAPSHOT) + final Collection foundSnapshotUUIDs = repoRoot.listBlobs(randomPurpose()) .keySet() .stream() .filter(p -> p.startsWith(prefix)) @@ -240,12 +240,12 @@ private static void assertSnapshotUUIDs( assertThat(foundSnapshotUUIDs, containsInAnyOrder(expectedSnapshotUUIDs.toArray(Strings.EMPTY_ARRAY))); } - final BlobContainer indicesContainer = repository.getBlobContainer().children(OperationPurpose.SNAPSHOT).get("indices"); + final BlobContainer indicesContainer = repository.getBlobContainer().children(randomPurpose()).get("indices"); final Map indices; if (indicesContainer == null) { indices = Collections.emptyMap(); } else { - indices = indicesContainer.children(OperationPurpose.SNAPSHOT); + indices = indicesContainer.children(randomPurpose()); } if (snapshotIds.isEmpty()) { listener.onResponse(null); @@ -298,7 +298,7 @@ private static void assertSnapshotInfosConsistency( assertThat(indices, hasKey(indexId.getId())); final BlobContainer indexContainer = indices.get(indexId.getId()); assertThat( - indexContainer.listBlobs(OperationPurpose.SNAPSHOT), + indexContainer.listBlobs(randomPurpose()), hasKey( String.format( Locale.ROOT, @@ -308,7 +308,7 @@ private static void assertSnapshotInfosConsistency( ) ); final IndexMetadata indexMetadata = repository.getSnapshotIndexMetaData(repositoryData, snapshotId, indexId); - for (Map.Entry entry : indexContainer.children(OperationPurpose.SNAPSHOT).entrySet()) { + for (Map.Entry entry : indexContainer.children(randomPurpose()).entrySet()) { // Skip Lucene MockFS extraN directory if (entry.getKey().startsWith("extra")) { continue; @@ -322,10 +322,7 @@ private static void assertSnapshotInfosConsistency( final BlobContainer shardContainer = entry.getValue(); 
// TODO: we shouldn't be leaking empty shard directories when a shard (but not all of the index it belongs to) // becomes unreferenced. We should fix that and remove this conditional once its fixed. - if (shardContainer.listBlobs(OperationPurpose.SNAPSHOT) - .keySet() - .stream() - .anyMatch(blob -> blob.startsWith("extra") == false)) { + if (shardContainer.listBlobs(randomPurpose()).keySet().stream().anyMatch(blob -> blob.startsWith("extra") == false)) { final int impliedCount = shardId - 1; maxShardCountsSeen.compute( indexId, @@ -336,7 +333,7 @@ private static void assertSnapshotInfosConsistency( && snapshotInfo.shardFailures() .stream() .noneMatch(shardFailure -> shardFailure.index().equals(index) && shardFailure.shardId() == shardId)) { - final Map shardPathContents = shardContainer.listBlobs(OperationPurpose.SNAPSHOT); + final Map shardPathContents = shardContainer.listBlobs(randomPurpose()); assertThat( shardPathContents, hasKey(String.format(Locale.ROOT, BlobStoreRepository.SNAPSHOT_NAME_FORMAT, snapshotId.getUUID())) @@ -376,10 +373,7 @@ public static void assertBlobsByPrefix(BlobStoreRepository repository, BlobPath repository.threadPool() .generic() .execute( - ActionRunnable.supply( - future, - () -> repository.blobStore().blobContainer(path).listBlobsByPrefix(OperationPurpose.SNAPSHOT, prefix) - ) + ActionRunnable.supply(future, () -> repository.blobStore().blobContainer(path).listBlobsByPrefix(randomPurpose(), prefix)) ); Map foundBlobs = future.actionGet(); if (blobs.isEmpty()) { @@ -464,4 +458,8 @@ private static ClusterService mockClusterService(ClusterState initialState) { when(clusterApplierService.threadPool()).thenReturn(threadPool); return clusterService; } + + public static OperationPurpose randomPurpose() { + return randomFrom(OperationPurpose.values()); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 3baadbc7d68be..578a7898bcd1e 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -63,6 +62,7 @@ import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.READONLY_SETTING_KEY; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_INDEX_NAME_FORMAT; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_NAME_FORMAT; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; @@ -124,7 +124,7 @@ public void testReadNonExistingPath() throws IOException { try (BlobStore store = newBlobStore()) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); expectThrows(NoSuchFileException.class, () -> { - try (InputStream is = container.readBlob(OperationPurpose.SNAPSHOT, "non-existing")) { + try (InputStream is = container.readBlob(randomPurpose(), "non-existing")) { is.read(); } }); @@ -141,7 +141,7 @@ public void testWriteRead() throws IOException { data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); 
writeBlob(container, "foobar", new BytesArray(data), false); } - try (InputStream stream = container.readBlob(OperationPurpose.SNAPSHOT, "foobar")) { + try (InputStream stream = container.readBlob(randomPurpose(), "foobar")) { BytesRefBuilder target = new BytesRefBuilder(); while (target.length() < data.length) { byte[] buffer = new byte[scaledRandomIntBetween(1, data.length - target.length())]; @@ -156,14 +156,14 @@ public void testWriteRead() throws IOException { assertEquals(data.length, target.length()); assertArrayEquals(data, Arrays.copyOfRange(target.bytes(), 0, target.length())); } - container.delete(OperationPurpose.SNAPSHOT); + container.delete(randomPurpose()); } } public void testList() throws IOException { try (BlobStore store = newBlobStore()) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - assertThat(container.listBlobs(OperationPurpose.SNAPSHOT).size(), CoreMatchers.equalTo(0)); + assertThat(container.listBlobs(randomPurpose()).size(), CoreMatchers.equalTo(0)); int numberOfFooBlobs = randomIntBetween(0, 10); int numberOfBarBlobs = randomIntBetween(3, 20); Map generatedBlobs = new HashMap<>(); @@ -184,7 +184,7 @@ public void testList() throws IOException { generatedBlobs.put(name, (long) length); writeRandomBlob(container, name, length); - Map blobs = container.listBlobs(OperationPurpose.SNAPSHOT); + Map blobs = container.listBlobs(randomPurpose()); assertThat(blobs.size(), CoreMatchers.equalTo(numberOfFooBlobs + numberOfBarBlobs)); for (Map.Entry generated : generatedBlobs.entrySet()) { BlobMetadata blobMetadata = blobs.get(generated.getKey()); @@ -193,10 +193,10 @@ public void testList() throws IOException { assertThat(blobMetadata.length(), CoreMatchers.equalTo(blobLengthFromContentLength(generated.getValue()))); } - assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "foo-").size(), CoreMatchers.equalTo(numberOfFooBlobs)); - assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "bar-").size(), 
CoreMatchers.equalTo(numberOfBarBlobs)); - assertThat(container.listBlobsByPrefix(OperationPurpose.SNAPSHOT, "baz-").size(), CoreMatchers.equalTo(0)); - container.delete(OperationPurpose.SNAPSHOT); + assertThat(container.listBlobsByPrefix(randomPurpose(), "foo-").size(), CoreMatchers.equalTo(numberOfFooBlobs)); + assertThat(container.listBlobsByPrefix(randomPurpose(), "bar-").size(), CoreMatchers.equalTo(numberOfBarBlobs)); + assertThat(container.listBlobsByPrefix(randomPurpose(), "baz-").size(), CoreMatchers.equalTo(0)); + container.delete(randomPurpose()); } } @@ -204,17 +204,17 @@ public void testDeleteBlobs() throws IOException { try (BlobStore store = newBlobStore()) { final List blobNames = Arrays.asList("foobar", "barfoo"); final BlobContainer container = store.blobContainer(BlobPath.EMPTY); - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); // does not raise when blobs + container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobNames.iterator()); // does not raise when blobs // don't exist byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); final BytesArray bytesArray = new BytesArray(data); for (String blobName : blobNames) { writeBlob(container, blobName, bytesArray, randomBoolean()); } - assertEquals(container.listBlobs(OperationPurpose.SNAPSHOT).size(), 2); - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); - assertTrue(container.listBlobs(OperationPurpose.SNAPSHOT).isEmpty()); - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobNames.iterator()); // does not raise when blobs + assertEquals(container.listBlobs(randomPurpose()).size(), 2); + container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobNames.iterator()); + assertTrue(container.listBlobs(randomPurpose()).isEmpty()); + container.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobNames.iterator()); // does not raise when blobs // don't exist } } 
@@ -226,9 +226,9 @@ public static void writeBlob( boolean failIfAlreadyExists ) throws IOException { if (randomBoolean()) { - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, bytesArray, failIfAlreadyExists); + container.writeBlob(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); } else { - container.writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, bytesArray, failIfAlreadyExists); + container.writeBlobAtomic(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); } } @@ -244,10 +244,10 @@ public void testContainerCreationAndDeletion() throws IOException { assertArrayEquals(readBlobFully(containerFoo, "test", data1.length), data1); assertArrayEquals(readBlobFully(containerBar, "test", data2.length), data2); - assertTrue(containerFoo.blobExists(OperationPurpose.SNAPSHOT, "test")); - assertTrue(containerBar.blobExists(OperationPurpose.SNAPSHOT, "test")); - containerBar.delete(OperationPurpose.SNAPSHOT); - containerFoo.delete(OperationPurpose.SNAPSHOT); + assertTrue(containerFoo.blobExists(randomPurpose(), "test")); + assertTrue(containerBar.blobExists(randomPurpose(), "test")); + containerBar.delete(randomPurpose()); + containerFoo.delete(randomPurpose()); } } @@ -259,7 +259,7 @@ public static byte[] writeRandomBlob(BlobContainer container, String name, int l public static byte[] readBlobFully(BlobContainer container, String name, int length) throws IOException { byte[] data = new byte[length]; - try (InputStream inputStream = container.readBlob(OperationPurpose.SNAPSHOT, name)) { + try (InputStream inputStream = container.readBlob(randomPurpose(), name)) { assertThat(Streams.readFully(inputStream, data), CoreMatchers.equalTo(length)); assertThat(inputStream.read(), CoreMatchers.equalTo(-1)); } @@ -275,7 +275,7 @@ public static byte[] randomBytes(int length) { } protected static void writeBlob(BlobContainer container, String blobName, BytesArray bytesArray) throws IOException { - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, 
bytesArray, true); + container.writeBlob(randomPurpose(), blobName, bytesArray, true); } protected BlobStore newBlobStore() { @@ -488,7 +488,7 @@ public void testIndicesDeletedFromRepository() throws Exception { for (IndexId indexId : repositoryData.actionGet().getIndices().values()) { if (indexId.getName().equals("test-idx-3")) { - assertFalse(indicesBlobContainer.get().blobExists(OperationPurpose.SNAPSHOT, indexId.getId())); // deleted index + assertFalse(indicesBlobContainer.get().blobExists(randomPurpose(), indexId.getId())); // deleted index } } @@ -507,7 +507,7 @@ public void testBlobStoreBulkDeletion() throws Exception { for (int j = 0; j < numberOfBlobsPerContainer; j++) { byte[] bytes = randomBytes(randomInt(100)); String blobName = randomAlphaOfLength(10); - container.writeBlob(OperationPurpose.SNAPSHOT, blobName, new BytesArray(bytes), false); + container.writeBlob(randomPurpose(), blobName, new BytesArray(bytes), false); if (randomBoolean()) { blobsToDelete.add(containerPath.buildAsString() + blobName); } else { @@ -516,14 +516,14 @@ public void testBlobStoreBulkDeletion() throws Exception { } } - store.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, blobsToDelete.iterator()); + store.deleteBlobsIgnoringIfNotExists(randomPurpose(), blobsToDelete.iterator()); for (var containerEntry : expectedBlobsPerContainer.entrySet()) { BlobContainer blobContainer = store.blobContainer(containerEntry.getKey()); - Map blobsInContainer = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); + Map blobsInContainer = blobContainer.listBlobs(randomPurpose()); for (String expectedBlob : containerEntry.getValue()) { assertThat(blobsInContainer, hasKey(expectedBlob)); } - blobContainer.delete(OperationPurpose.SNAPSHOT); + blobContainer.delete(randomPurpose()); } } } @@ -556,7 +556,7 @@ public void testDanglingShardLevelBlobCleanup() throws Exception { // Create an extra dangling blob as if from an earlier snapshot that failed to clean up shardContainer.writeBlob( - 
OperationPurpose.SNAPSHOT, + randomPurpose(), BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX + UUIDs.randomBase64UUID(random()), BytesArray.EMPTY, true @@ -580,7 +580,7 @@ public void testDanglingShardLevelBlobCleanup() throws Exception { assertAcked(client.admin().cluster().prepareDeleteSnapshot(repoName, "snapshot-1")); // Retrieve the blobs actually present - final var actualBlobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT) + final var actualBlobs = shardContainer.listBlobs(randomPurpose()) .keySet() .stream() .filter(f -> ExtrasFS.isExtra(f) == false) diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java index 8e94b3fa41fcf..43b0fb7025bd8 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.IOUtils; @@ -24,6 +23,7 @@ import java.util.stream.Stream; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.READONLY_SETTING_KEY; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.instanceOf; @@ -114,7 +114,7 @@ public void testReadOnly() throws Exception { byte[] data = 
randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); writeBlob(container, "test", new BytesArray(data)); assertArrayEquals(readBlobFully(container, "test", data.length), data); - assertTrue(container.blobExists(OperationPurpose.SNAPSHOT, "test")); + assertTrue(container.blobExists(randomPurpose(), "test")); } } } From c0bf49a9568eb333a927269cce31fd0b426358dc Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Thu, 30 Nov 2023 07:59:00 +0100 Subject: [PATCH 090/263] [Profiling] Notify early about task cancellation (#102740) With this commit we add explicit checks in selected places of `TransportGetStackTracesAction` whether the parent task has been cancelled. This can happen for example when a request times out and the client closes the TCP connection. While parent task cancellation is checked explicitly before making internal calls (such as an mget) this can lead to excessive log output due to the nature of these requests (we usually send multiple of them concurrently). With the explicit checks, the listener is notified as early as possible and request processing is cancelled, thus reducing log output. Note that this is only best effort as a task might be cancelled in between our explicit check and the one done by the internal client. 
--- docs/changelog/102740.yaml | 5 ++ .../xpack/profiling/ProfilingTestCase.java | 1 - .../TransportGetStackTracesAction.java | 50 ++++++++++++++++--- 3 files changed, 48 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/102740.yaml diff --git a/docs/changelog/102740.yaml b/docs/changelog/102740.yaml new file mode 100644 index 0000000000000..b7fc10eb19ddb --- /dev/null +++ b/docs/changelog/102740.yaml @@ -0,0 +1,5 @@ +pr: 102740 +summary: "[Profiling] Notify early about task cancellation" +area: Application +type: enhancement +issues: [] diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java index ed3995a9c9ea7..6a95b7c8d8573 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/ProfilingTestCase.java @@ -81,7 +81,6 @@ protected final void createIndex(String name, String bodyFileName) throws Except .execute() .get(); assertTrue("Creation of [" + name + "] is not acknowledged.", response.isAcknowledged()); - assertTrue("Shards for [" + name + "] are not acknowledged.", response.isShardsAcknowledged()); } /** diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 4dc4929b666f8..7944c421e8135 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -41,7 +41,9 @@ import org.elasticsearch.search.collapse.CollapseBuilder; import 
org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; @@ -157,13 +159,30 @@ protected void doExecute(Task submitTask, GetStackTracesRequest request, ActionL responseBuilder.setCustomCostPerCoreHour(request.getCustomCostPerCoreHour()); Client client = new ParentTaskAssigningClient(this.nodeClient, transportService.getLocalNode(), submitTask); if (request.getIndices() == null) { - searchProfilingEvents(client, request, submitListener, responseBuilder); + searchProfilingEvents(submitTask, client, request, submitListener, responseBuilder); } else { - searchGenericEvents(client, request, submitListener, responseBuilder); + searchGenericEvents(submitTask, client, request, submitListener, responseBuilder); + } + } + + /** + * Checks whether a task has been cancelled and notifies the provided listener if required. + * @param task The task to check. May be a cancelable task. + * @param listener Listener to notify. + * @return true iff the task has been cancelled. Callers must terminate as early as possible. 
+ */ + private boolean mayNotifyOfCancellation(Task task, ActionListener listener) { + if (task instanceof CancellableTask && ((CancellableTask) task).isCancelled()) { + log.info("{} got cancelled.", task); + listener.onFailure(new TaskCancelledException("get stacktraces task cancelled")); + return true; + } else { + return false; } } private void searchProfilingEvents( + Task submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -186,7 +205,7 @@ private void searchProfilingEvents( resampledIndex ); log.debug(watch::report); - searchEventGroupedByStackTrace(client, request, submitListener, responseBuilder, resampledIndex); + searchEventGroupedByStackTrace(submitTask, client, request, submitListener, responseBuilder, resampledIndex); }, e -> { // All profiling-events data streams are created lazily. In a relatively empty cluster it can happen that there are so few // data that we need to resort to the "full" events stream. As this is an edge case we'd rather fail instead of prematurely @@ -195,7 +214,7 @@ private void searchProfilingEvents( String missingIndex = ((IndexNotFoundException) e).getIndex().getName(); EventsIndex fullIndex = EventsIndex.FULL_INDEX; log.debug("Index [{}] does not exist. 
Using [{}] instead.", missingIndex, fullIndex.getName()); - searchEventGroupedByStackTrace(client, request, submitListener, responseBuilder, fullIndex); + searchEventGroupedByStackTrace(submitTask, client, request, submitListener, responseBuilder, fullIndex); } else { submitListener.onFailure(e); } @@ -203,6 +222,7 @@ private void searchProfilingEvents( } private void searchGenericEvents( + Task submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -218,7 +238,7 @@ private void searchGenericEvents( .addAggregation( new CountedTermsAggregationBuilder("group_by").size(MAX_TRACE_EVENTS_RESULT_SIZE).field(request.getStackTraceIds()) ) - .execute(handleEventsGroupedByStackTrace(client, responseBuilder, submitListener, searchResponse -> { + .execute(handleEventsGroupedByStackTrace(submitTask, client, responseBuilder, submitListener, searchResponse -> { long totalSamples = 0; StringTerms stacktraces = searchResponse.getAggregations().get("group_by"); @@ -249,6 +269,7 @@ private void searchGenericEvents( } private void searchEventGroupedByStackTrace( + Task submitTask, Client client, GetStackTracesRequest request, ActionListener submitListener, @@ -285,7 +306,7 @@ private void searchEventGroupedByStackTrace( ) ) .addAggregation(new SumAggregationBuilder("total_count").field("Stacktrace.count")) - .execute(handleEventsGroupedByStackTrace(client, responseBuilder, submitListener, searchResponse -> { + .execute(handleEventsGroupedByStackTrace(submitTask, client, responseBuilder, submitListener, searchResponse -> { long totalCount = getAggValueAsLong(searchResponse, "total_count"); Resampler resampler = new Resampler(request, responseBuilder.getSamplingRate(), totalCount); @@ -339,6 +360,7 @@ The same stacktraces may come from different hosts (eventually from different da } private ActionListener handleEventsGroupedByStackTrace( + Task submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener, 
@@ -356,7 +378,7 @@ private ActionListener handleEventsGroupedByStackTrace( responseBuilder.setStart(Instant.ofEpochMilli(minTime)); responseBuilder.setEnd(Instant.ofEpochMilli(maxTime)); responseBuilder.setStackTraceEvents(stackTraceEvents); - retrieveStackTraces(client, responseBuilder, submitListener); + retrieveStackTraces(submitTask, client, responseBuilder, submitListener); } else { submitListener.onResponse(responseBuilder.build()); } @@ -377,10 +399,14 @@ private static long getAggValueAsLong(SearchResponse searchResponse, String fiel } private void retrieveStackTraces( + Task submitTask, Client client, GetStackTracesResponseBuilder responseBuilder, ActionListener submitListener ) { + if (mayNotifyOfCancellation(submitTask, submitListener)) { + return; + } List eventIds = new ArrayList<>(responseBuilder.getStackTraceEvents().keySet()); List> slicedEventIds = sliced(eventIds, desiredSlices); ClusterState clusterState = clusterService.state(); @@ -393,6 +419,7 @@ private void retrieveStackTraces( } StackTraceHandler handler = new StackTraceHandler( + submitTask, clusterState, client, responseBuilder, @@ -452,6 +479,7 @@ static List> sliced(List c, int slices) { private class StackTraceHandler { private final AtomicInteger expectedResponses; + private final Task submitTask; private final ClusterState clusterState; private final Client client; private final GetStackTracesResponseBuilder responseBuilder; @@ -468,6 +496,7 @@ private class StackTraceHandler { private final Map hostMetadata; private StackTraceHandler( + Task submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -476,6 +505,7 @@ private StackTraceHandler( int expectedResponses, int expectedHosts ) { + this.submitTask = submitTask; this.clusterState = clusterState; this.stackTracePerId = new ConcurrentHashMap<>(stackTraceCount); this.expectedResponses = new AtomicInteger(expectedResponses); @@ -574,6 +604,7 @@ public void mayFinish() { ); 
log.debug(watch::report); retrieveStackTraceDetails( + submitTask, clusterState, client, responseBuilder, @@ -586,6 +617,7 @@ public void mayFinish() { } private void retrieveStackTraceDetails( + Task submitTask, ClusterState clusterState, Client client, GetStackTracesResponseBuilder responseBuilder, @@ -593,6 +625,10 @@ private void retrieveStackTraceDetails( List executableIds, ActionListener submitListener ) { + if (mayNotifyOfCancellation(submitTask, submitListener)) { + return; + } + List> slicedStackFrameIds = sliced(stackFrameIds, desiredDetailSlices); List> slicedExecutableIds = sliced(executableIds, desiredDetailSlices); List stackFrameIndices = resolver.resolve( From bd154e09c63e9f69e3bfac15b0e3021445ce0f10 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 30 Nov 2023 18:53:50 +1100 Subject: [PATCH 091/263] Record S3 retry stats with APM metrics (#102505) S3 retry stats are added in #100272 using log messages. Changes in this PR are intended to replace the log based stats with APM metrics. --- .../s3/S3BlobStoreRepositoryMetricsTests.java | 217 ++++++++++++++++++ .../s3/S3BlobStoreRepositoryTests.java | 60 +---- .../repositories/s3/S3BlobStore.java | 72 +++++- .../repositories/RepositoriesModule.java | 17 ++ .../telemetry/TestTelemetryPlugin.java | 4 + 5 files changed, 311 insertions(+), 59 deletions(-) create mode 100644 modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java new file mode 100644 index 0000000000000..2c759abc1e437 --- /dev/null +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryMetricsTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.repositories.s3; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.s3.S3BlobStore.Operation; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.LinkedBlockingQueue; + +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_EXCEPTIONS_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_EXCEPTIONS_HISTOGRAM; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_OPERATIONS_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_REQUESTS_COUNT; +import static 
org.elasticsearch.repositories.RepositoriesModule.METRIC_THROTTLES_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_THROTTLES_HISTOGRAM; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_UNSUCCESSFUL_OPERATIONS_COUNT; +import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; +import static org.elasticsearch.rest.RestStatus.NOT_FOUND; +import static org.elasticsearch.rest.RestStatus.TOO_MANY_REQUESTS; +import static org.hamcrest.Matchers.equalTo; + +@SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") +// Need to set up a new cluster for each test because cluster settings use randomized authentication settings +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +public class S3BlobStoreRepositoryMetricsTests extends S3BlobStoreRepositoryTests { + + private final Queue errorStatusQueue = new LinkedBlockingQueue<>(); + + // Always create erroneous handler + @Override + protected Map createHttpHandlers() { + return Collections.singletonMap( + "/bucket", + new S3StatsCollectorHttpHandler(new S3MetricErroneousHttpHandler(new S3BlobStoreHttpHandler("bucket"), errorStatusQueue)) + ); + } + + @Override + protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { + return delegate; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + final Settings settings = super.nodeSettings(nodeOrdinal, otherSettings); + return Settings.builder() + .put(settings) + .put(S3ClientSettings.MAX_RETRIES_SETTING.getConcreteSettingForNamespace("test").getKey(), 4) + .build(); + } + + public void testMetricsWithErrors() throws IOException { + final String repository = createRepository(randomRepositoryName()); + + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final var blobStoreRepository = (BlobStoreRepository) internalCluster().getInstance(RepositoriesService.class, 
dataNodeName) + .repository(repository); + final BlobStore blobStore = blobStoreRepository.blobStore(); + final TestTelemetryPlugin plugin = internalCluster().getInstance(PluginsService.class, dataNodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + + plugin.resetMeter(); + + final OperationPurpose purpose = randomFrom(OperationPurpose.values()); + final BlobContainer blobContainer = blobStore.blobContainer(BlobPath.EMPTY.add(randomIdentifier())); + final String blobName = randomIdentifier(); + + // Put a blob + final int nPuts = randomIntBetween(1, 3); + for (int i = 0; i < nPuts; i++) { + final long batch = i + 1; + addErrorStatus(INTERNAL_SERVER_ERROR, TOO_MANY_REQUESTS, TOO_MANY_REQUESTS); + blobContainer.writeBlob(purpose, blobName, new BytesArray("blob"), false); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.PUT_OBJECT), equalTo(4L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.PUT_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.PUT_OBJECT), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.PUT_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.PUT_OBJECT), equalTo(2L * batch)); + assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.PUT_OBJECT), equalTo(batch)); + assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.PUT_OBJECT), equalTo(2L * batch)); + } + + // Get not found + final int nGets = randomIntBetween(1, 3); + for (int i = 0; i < nGets; i++) { + final long batch = i + 1; + addErrorStatus(TOO_MANY_REQUESTS, NOT_FOUND); + try { + blobContainer.readBlob(purpose, blobName).close(); + } catch (Exception e) { + // intentional failure + } + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.GET_OBJECT), equalTo(2L * batch)); + 
assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.GET_OBJECT), equalTo(batch)); + assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.GET_OBJECT), equalTo(batch)); + } + + // List retry exhausted + final int nLists = randomIntBetween(1, 3); + for (int i = 0; i < nLists; i++) { + final long batch = i + 1; + addErrorStatus(TOO_MANY_REQUESTS, TOO_MANY_REQUESTS, TOO_MANY_REQUESTS, TOO_MANY_REQUESTS, TOO_MANY_REQUESTS); + try { + blobContainer.listBlobs(purpose); + } catch (Exception e) { + // intentional failure + } + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, Operation.LIST_OBJECTS), equalTo(5L * batch)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.LIST_OBJECTS), equalTo(5L * batch)); + assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.LIST_OBJECTS), equalTo(batch)); + assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.LIST_OBJECTS), equalTo(5L * batch)); + } + + // Delete to clean up + blobContainer.deleteBlobsIgnoringIfNotExists(purpose, Iterators.single(blobName)); + assertThat(getLongCounterValue(plugin, METRIC_REQUESTS_COUNT, 
Operation.DELETE_OBJECTS), equalTo(1L)); + assertThat(getLongCounterValue(plugin, METRIC_OPERATIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(1L)); + assertThat(getLongCounterValue(plugin, METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_EXCEPTIONS_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongCounterValue(plugin, METRIC_THROTTLES_COUNT, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongHistogramValue(plugin, METRIC_EXCEPTIONS_HISTOGRAM, Operation.DELETE_OBJECTS), equalTo(0L)); + assertThat(getLongHistogramValue(plugin, METRIC_THROTTLES_HISTOGRAM, Operation.DELETE_OBJECTS), equalTo(0L)); + } + + private void addErrorStatus(RestStatus... statuses) { + errorStatusQueue.addAll(Arrays.asList(statuses)); + } + + private long getLongCounterValue(TestTelemetryPlugin plugin, String instrumentName, Operation operation) { + final List measurements = Measurement.combine(plugin.getLongCounterMeasurement(instrumentName)); + return measurements.stream() + .filter(m -> m.attributes().get("operation") == operation.getKey()) + .mapToLong(Measurement::getLong) + .findFirst() + .orElse(0L); + } + + private long getLongHistogramValue(TestTelemetryPlugin plugin, String instrumentName, Operation operation) { + final List measurements = Measurement.combine(plugin.getLongHistogramMeasurement(instrumentName)); + return measurements.stream() + .filter(m -> m.attributes().get("operation") == operation.getKey()) + .mapToLong(Measurement::getLong) + .findFirst() + .orElse(0L); + } + + @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") + private static class S3MetricErroneousHttpHandler implements DelegatingHttpHandler { + + private final HttpHandler delegate; + private final Queue errorStatusQueue; + + S3MetricErroneousHttpHandler(HttpHandler delegate, Queue errorStatusQueue) { + this.delegate = delegate; + this.errorStatusQueue = errorStatusQueue; + 
} + + @Override + public void handle(HttpExchange exchange) throws IOException { + final RestStatus status = errorStatusQueue.poll(); + if (status == null) { + delegate.handle(exchange); + } else if (status == INTERNAL_SERVER_ERROR) { + // Simulate a retryable exception + throw new IOException("ouch"); + } else { + try (exchange) { + drainInputStream(exchange.getRequestBody()); + exchange.sendResponseHeaders(status.getStatus(), -1); + } + } + } + + public HttpHandler getDelegate() { + return delegate; + } + } +} diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index c0d2f9e1ed6f9..5a445a1524da5 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -51,11 +51,7 @@ import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.mockstore.BlobStoreWrapper; import org.elasticsearch.telemetry.Measurement; -import org.elasticsearch.telemetry.RecordingInstruments; -import org.elasticsearch.telemetry.RecordingMeterRegistry; import org.elasticsearch.telemetry.TestTelemetryPlugin; -import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestIssueLogging; @@ -141,7 +137,7 @@ protected Settings repositorySettings(String repoName) { @Override protected Collection> nodePlugins() { - return List.of(TestS3RepositoryPlugin.class, TestS3BlobTelemetryPlugin.class); + return List.of(TestS3RepositoryPlugin.class, TestTelemetryPlugin.class); } @Override @@ -269,7 +265,7 
@@ public void testMetrics() throws Exception { .getStatsCollectors().collectors; final var plugins = internalCluster().getInstance(PluginsService.class, nodeName) - .filterPlugins(TestS3BlobTelemetryPlugin.class) + .filterPlugins(TestTelemetryPlugin.class) .toList(); assertThat(plugins, hasSize(1)); final List metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_COUNT)); @@ -279,6 +275,10 @@ public void testMetrics() throws Exception { equalTo(metrics.stream().map(m -> m.attributes().get("operation")).collect(Collectors.toSet()).size()) ); metrics.forEach(metric -> { + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation")); final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey( operation, @@ -467,7 +467,7 @@ void ensureMultiPartUploadSize(long blobSize) {} } @SuppressForbidden(reason = "this test uses a HttpHandler to emulate an S3 endpoint") - private class S3BlobStoreHttpHandler extends S3HttpHandler implements BlobStoreHttpHandler { + protected class S3BlobStoreHttpHandler extends S3HttpHandler implements BlobStoreHttpHandler { S3BlobStoreHttpHandler(final String bucket) { super(bucket); @@ -501,7 +501,7 @@ private void validateAuthHeader(HttpExchange exchange) { * slow down the test suite. */ @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") - private static class S3ErroneousHttpHandler extends ErroneousHttpHandler { + protected static class S3ErroneousHttpHandler extends ErroneousHttpHandler { S3ErroneousHttpHandler(final HttpHandler delegate, final int maxErrorsPerRequest) { super(delegate, maxErrorsPerRequest); @@ -518,7 +518,7 @@ protected String requestUniqueId(final HttpExchange exchange) { * HTTP handler that tracks the number of requests performed against S3. 
*/ @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") - private class S3StatsCollectorHttpHandler extends HttpStatsCollectorHandler { + protected class S3StatsCollectorHttpHandler extends HttpStatsCollectorHandler { S3StatsCollectorHttpHandler(final HttpHandler delegate) { super(delegate); @@ -563,46 +563,4 @@ private boolean isMultiPartUpload(String request) { || Regex.simpleMatch("PUT /*/*?*uploadId=*", request); } } - - public static class TestS3BlobTelemetryPlugin extends TestTelemetryPlugin { - protected final MeterRegistry meter = new RecordingMeterRegistry() { - private final LongCounter longCounter = new RecordingInstruments.RecordingLongCounter(METRIC_REQUESTS_COUNT, recorder) { - @Override - public void increment() { - throw new UnsupportedOperationException(); - } - - @Override - public void incrementBy(long inc) { - throw new UnsupportedOperationException(); - } - - @Override - public void incrementBy(long inc, Map attributes) { - assertThat( - attributes, - allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) - ); - super.incrementBy(inc, attributes); - } - }; - - @Override - protected LongCounter buildLongCounter(String name, String description, String unit) { - return longCounter; - } - - @Override - public LongCounter registerLongCounter(String name, String description, String unit) { - assertThat(name, equalTo(METRIC_REQUESTS_COUNT)); - return super.registerLongCounter(name, description, unit); - } - - @Override - public LongCounter getLongCounter(String name) { - assertThat(name, equalTo(METRIC_REQUESTS_COUNT)); - return super.getLongCounter(name); - } - }; - } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 25a2c4d8e1613..37d076362f396 100644 --- 
a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -17,6 +17,7 @@ import com.amazonaws.services.s3.model.MultiObjectDeleteException; import com.amazonaws.services.s3.model.StorageClass; import com.amazonaws.util.AWSRequestMetrics; +import com.amazonaws.util.TimingInfo; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -32,6 +33,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.LongHistogram; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; @@ -43,6 +45,7 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; @@ -50,7 +53,13 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_EXCEPTIONS_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_EXCEPTIONS_HISTOGRAM; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_OPERATIONS_COUNT; import static org.elasticsearch.repositories.RepositoriesModule.METRIC_REQUESTS_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_THROTTLES_COUNT; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_THROTTLES_HISTOGRAM; +import static org.elasticsearch.repositories.RepositoriesModule.METRIC_UNSUCCESSFUL_OPERATIONS_COUNT; class S3BlobStore implements BlobStore { @@ -82,6 +91,12 @@ class S3BlobStore implements BlobStore { private final Executor snapshotExecutor; private final MeterRegistry 
meterRegistry; private final LongCounter requestCounter; + private final LongCounter exceptionCounter; + private final LongCounter throttleCounter; + private final LongCounter operationCounter; + private final LongCounter unsuccessfulOperationCounter; + private final LongHistogram exceptionHistogram; + private final LongHistogram throttleHistogram; private final StatsCollectors statsCollectors = new StatsCollectors(); @@ -113,6 +128,12 @@ class S3BlobStore implements BlobStore { this.snapshotExecutor = threadPool.executor(ThreadPool.Names.SNAPSHOT); this.meterRegistry = meterRegistry; this.requestCounter = this.meterRegistry.getLongCounter(METRIC_REQUESTS_COUNT); + this.exceptionCounter = this.meterRegistry.getLongCounter(METRIC_EXCEPTIONS_COUNT); + this.throttleCounter = this.meterRegistry.getLongCounter(METRIC_THROTTLES_COUNT); + this.operationCounter = this.meterRegistry.getLongCounter(METRIC_OPERATIONS_COUNT); + this.unsuccessfulOperationCounter = this.meterRegistry.getLongCounter(METRIC_UNSUCCESSFUL_OPERATIONS_COUNT); + this.exceptionHistogram = this.meterRegistry.getLongHistogram(METRIC_EXCEPTIONS_HISTOGRAM); + this.throttleHistogram = this.meterRegistry.getLongHistogram(METRIC_THROTTLES_HISTOGRAM); s3RequestRetryStats = new S3RequestRetryStats(getMaxRetries()); threadPool.scheduleWithFixedDelay(() -> { var priorRetryStats = s3RequestRetryStats; @@ -168,10 +189,40 @@ private IgnoreNoResponseMetricsCollector(Operation operation, OperationPurpose p @Override public final void collectMetrics(Request request, Response response) { + assert assertConsistencyBetweenHttpRequestAndOperation(request, operation); + final AWSRequestMetrics awsRequestMetrics = request.getAWSRequestMetrics(); + final TimingInfo timingInfo = awsRequestMetrics.getTimingInfo(); + final long requestCount = getCountForMetric(timingInfo, AWSRequestMetrics.Field.RequestCount); + final long exceptionCount = getCountForMetric(timingInfo, AWSRequestMetrics.Field.Exception); + final long 
throttleCount = getCountForMetric(timingInfo, AWSRequestMetrics.Field.ThrottleException); + + // For stats reported by API, do not collect stats for null response for BWC. + // See https://github.com/elastic/elasticsearch/pull/71406 + // TODO Is this BWC really necessary? if (response != null) { - assert assertConsistencyBetweenHttpRequestAndOperation(request, operation); - counter.add(getRequestCount(request)); - requestCounter.incrementBy(getRequestCount(request), attributes); + counter.add(requestCount); + } + + // We collect all metrics regardless whether response is null + // There are many situations other than network where a null response can be returned. + // In addition, we are interested in the stats when there is a network outage. + final int numberOfAwsErrors = Optional.ofNullable(awsRequestMetrics.getProperty(AWSRequestMetrics.Field.AWSErrorCode)) + .map(List::size) + .orElse(0); + + operationCounter.incrementBy(1, attributes); + if (numberOfAwsErrors == requestCount) { + unsuccessfulOperationCounter.incrementBy(1, attributes); + } + + requestCounter.incrementBy(requestCount, attributes); + if (exceptionCount > 0) { + exceptionCounter.incrementBy(exceptionCount, attributes); + exceptionHistogram.record(exceptionCount, attributes); + } + if (throttleCount > 0) { + throttleCounter.incrementBy(throttleCount, attributes); + throttleHistogram.record(throttleCount, attributes); } } @@ -197,13 +248,18 @@ private boolean assertConsistencyBetweenHttpRequestAndOperation(Request reque } } - private static long getRequestCount(Request request) { - Number requestCount = request.getAWSRequestMetrics().getTimingInfo().getCounter(AWSRequestMetrics.Field.RequestCount.name()); - if (requestCount == null) { - logger.warn("Expected request count to be tracked for request [{}] but found not count.", request); + private static long getCountForMetric(TimingInfo info, AWSRequestMetrics.Field field) { + var count = info.getCounter(field.name()); + if (count == null) { + if 
(field == AWSRequestMetrics.Field.RequestCount) { + final String message = "Expected request count to be tracked but found no count."; + assert false : message; + logger.warn(message); + } return 0L; + } else { + return count.longValue(); } - return requestCount.longValue(); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index 630c0ea673c8b..b066b4c5a329e 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -36,6 +36,13 @@ public final class RepositoriesModule { public static final String METRIC_REQUESTS_COUNT = "es.repositories.requests.count"; + public static final String METRIC_EXCEPTIONS_COUNT = "es.repositories.exceptions.count"; + public static final String METRIC_THROTTLES_COUNT = "es.repositories.throttles.count"; + public static final String METRIC_OPERATIONS_COUNT = "es.repositories.operations.count"; + public static final String METRIC_UNSUCCESSFUL_OPERATIONS_COUNT = "es.repositories.operations.unsuccessful.count"; + public static final String METRIC_EXCEPTIONS_HISTOGRAM = "es.repositories.exceptions.histogram"; + public static final String METRIC_THROTTLES_HISTOGRAM = "es.repositories.throttles.histogram"; + private final RepositoriesService repositoriesService; public RepositoriesModule( @@ -49,6 +56,16 @@ public RepositoriesModule( TelemetryProvider telemetryProvider ) { telemetryProvider.getMeterRegistry().registerLongCounter(METRIC_REQUESTS_COUNT, "repository request counter", "unit"); + telemetryProvider.getMeterRegistry().registerLongCounter(METRIC_EXCEPTIONS_COUNT, "repository request exception counter", "unit"); + telemetryProvider.getMeterRegistry().registerLongCounter(METRIC_THROTTLES_COUNT, "repository request throttle counter", "unit"); + telemetryProvider.getMeterRegistry() + 
.registerLongCounter(METRIC_OPERATIONS_COUNT, "repository operation counter", "unit"); + telemetryProvider.getMeterRegistry() + .registerLongCounter(METRIC_UNSUCCESSFUL_OPERATIONS_COUNT, "repository unsuccessful operation counter", "unit"); + telemetryProvider.getMeterRegistry() + .registerLongHistogram(METRIC_EXCEPTIONS_HISTOGRAM, "repository request exception histogram", "unit"); + telemetryProvider.getMeterRegistry() + .registerLongHistogram(METRIC_THROTTLES_HISTOGRAM, "repository request throttle histogram", "unit"); Map factories = new HashMap<>(); factories.put( FsRepository.TYPE, diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java index 53aef542f0d1a..e237f6c9bbb4b 100644 --- a/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java @@ -65,6 +65,10 @@ public List getLongHistogramMeasurement(String name) { return meter.getRecorder().getMeasurements(InstrumentType.LONG_HISTOGRAM, name); } + public void resetMeter() { + meter.getRecorder().resetCalls(); + } + @Override public TelemetryProvider getTelemetryProvider(Settings settings) { return new TelemetryProvider() { From 3edc5b70c114042d4311bdc68b051b5f8c6ab794 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 30 Nov 2023 09:15:05 +0100 Subject: [PATCH 092/263] [Connector API] Implementation of list, get, delete connector endpoints (#102592) Add list, get, delete endpoints and parsers for Connector document data structures, ensuring payload validation and parsing. Includes BWC, Yaml, unit tests for verification. 
--- .../rest-api-spec/api/connector.delete.json | 32 ++ .../rest-api-spec/api/connector.get.json | 32 ++ .../rest-api-spec/api/connector.list.json | 38 ++ .../rest-api-spec/api/connector.put.json | 3 +- .../test/entsearch/300_connector_put.yml | 56 ++ .../test/entsearch/310_connector_list.yml | 108 ++++ .../test/entsearch/320_connector_delete.yml | 34 ++ .../xpack/application/EnterpriseSearch.java | 27 +- .../application/connector/Connector.java | 220 +++++++- .../connector/ConnectorCustomSchedule.java | 20 +- .../connector/ConnectorFeatures.java | 309 +++++++--- .../connector/ConnectorFiltering.java | 527 +++--------------- .../connector/ConnectorIndexService.java | 160 ++++++ .../connector/ConnectorIngestPipeline.java | 38 ++ .../connector/ConnectorListItem.java | 61 -- .../connector/ConnectorStatus.java | 9 + .../connector/ConnectorSyncInfo.java | 22 +- .../connector/ConnectorSyncStatus.java | 9 + .../action/DeleteConnectorAction.java | 109 ++++ .../connector/action/GetConnectorAction.java | 151 +++++ .../connector/action/ListConnectorAction.java | 149 +++++ .../action/RestDeleteConnectorAction.java | 38 ++ .../action/RestGetConnectorAction.java | 37 ++ .../action/RestListConnectorAction.java | 42 ++ .../TransportDeleteConnectorAction.java | 48 ++ .../action/TransportGetConnectorAction.java | 45 ++ .../action/TransportListConnectorAction.java | 52 ++ .../filtering/FilteringAdvancedSnippet.java | 142 +++++ .../connector/filtering/FilteringPolicy.java | 29 + .../connector/filtering/FilteringRule.java | 237 ++++++++ .../filtering/FilteringRuleCondition.java | 38 ++ .../connector/filtering/FilteringRules.java | 145 +++++ .../filtering/FilteringValidation.java | 117 ++++ .../filtering/FilteringValidationInfo.java | 127 +++++ .../filtering/FilteringValidationState.java | 30 + .../ConnectorCustomScheduleTests.java | 42 ++ .../connector/ConnectorFeaturesTests.java | 83 +++ .../connector/ConnectorFilteringTests.java | 72 +++ .../connector/ConnectorIndexServiceTests.java 
| 98 +++- .../ConnectorIngestPipelineTests.java | 28 + .../connector/ConnectorTestUtils.java | 80 ++- .../application/connector/ConnectorTests.java | 152 +++++ ...ectorActionRequestBWCSerializingTests.java | 43 ++ ...ectorActionRequestBWCSerializingTests.java | 43 ++ ...ctorActionResponseBWCSerializingTests.java | 48 ++ ...ectorActionRequestBWCSerializingTests.java | 45 ++ ...ctorActionResponseBWCSerializingTests.java | 37 ++ .../xpack/security/operator/Constants.java | 3 + 48 files changed, 3350 insertions(+), 665 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml delete mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorListItem.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportGetConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringPolicy.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRuleCondition.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationState.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionRequestBWCSerializingTests.java 
create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json new file mode 100644 index 0000000000000..6cfc0ffcaf02b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -0,0 +1,32 @@ +{ + "connector.delete": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Deletes a connector." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}", + "methods": [ + "DELETE" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be deleted." + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json new file mode 100644 index 0000000000000..d866920324852 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json @@ -0,0 +1,32 @@ +{ + "connector.get": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Returns the details about a connector." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}", + "methods": [ + "GET" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be returned." + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json new file mode 100644 index 0000000000000..a1e5ddcc5d686 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -0,0 +1,38 @@ +{ + "connector.list": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Lists all connectors." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "from": { + "type": "int", + "default": 0, + "description": "Starting offset (default: 0)" + }, + "size": { + "type": "int", + "default": 100, + "description": "specifies a max number of results to get (default: 100)" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json index 0ab5c18671040..8511b870a2d12 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json @@ -5,7 +5,8 @@ "description": "Creates or updates a connector." 
}, "stability": "experimental", - "visibility": "public", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", "headers": { "accept": [ "application/json" diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml index 315d581abb6a2..464b64a2b24a3 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml @@ -18,7 +18,63 @@ setup: - match: { result: 'created' } + - do: + connector.get: + connector_id: test-connector + + - match: { connector_id: test-connector } + - match: { index_name: search-test } + - match: { name: my-connector } + - match: { language: pl } + - match: { is_native: false } + - match: { service_type: super-connector } + +--- +'Create Connector - Default values are initialized correctly': + - do: + connector.put: + connector_id: test-connector-with-defaults + body: + index_name: search-test + + - match: { result: 'created' } + + - do: + connector.get: + connector_id: test-connector-with-defaults + + - match: { connector_id: test-connector-with-defaults } + - match: { index_name: search-test } + - match: { is_native: false } + - match: { sync_now: false } + - match: { status: created } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } + +--- +'Create Connector - Native connector is initialized correctly': + - do: + connector.put: + connector_id: test-connector-native + body: + index_name: search-test + is_native: true + + - match: { result: 'created' } + + - do: + connector.get: + connector_id: test-connector-native + - match: { connector_id: test-connector-native } + - match: { index_name: 
search-test } + - match: { is_native: true } + - match: { sync_now: false } + - match: { status: needs_configuration } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } --- 'Create Connector - Resource already exists': diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml new file mode 100644 index 0000000000000..36cd1c283f7e8 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml @@ -0,0 +1,108 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + + - do: + connector.put: + connector_id: connector-a + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + - do: + connector.put: + connector_id: connector-c + body: + index_name: search-3-test + name: my-connector + language: nl + is_native: false + service_type: super-connector + - do: + connector.put: + connector_id: connector-b + body: + index_name: search-2-test + name: my-connector + language: en + is_native: true + service_type: super-connector + +--- +"List Connectors": + - do: + connector.list: { } + + - match: { count: 3 } + + # Alphabetical order by connector_id for results + - match: { results.0.connector_id: "connector-a" } + - match: { results.0.index_name: "search-1-test" } + - match: { results.0.language: "pl" } + + - match: { results.1.connector_id: "connector-b" } + - match: { results.1.index_name: "search-2-test" } + - match: { results.1.language: "en" } + + - match: { results.2.connector_id: "connector-c" } + - match: { results.2.index_name: "search-3-test" } + - match: { results.2.language: "nl" } + + +--- +"List Connectors - with from": + - do: + connector.list: + from: 1 + + - match: 
{ count: 3 } + + # Alphabetical order by connector_id for results + - match: { results.0.connector_id: "connector-b" } + - match: { results.0.index_name: "search-2-test" } + - match: { results.0.language: "en" } + + - match: { results.1.connector_id: "connector-c" } + - match: { results.1.index_name: "search-3-test" } + - match: { results.1.language: "nl" } + +--- +"List Connector- with size": + - do: + connector.list: + size: 2 + + - match: { count: 3 } + + # Alphabetical order by connector_id for results + - match: { results.0.connector_id: "connector-a" } + - match: { results.0.index_name: "search-1-test" } + - match: { results.0.language: "pl" } + + - match: { results.1.connector_id: "connector-b" } + - match: { results.1.index_name: "search-2-test" } + - match: { results.1.language: "en" } + +--- +"List Connector - empty": + - do: + connector.delete: + connector_id: connector-a + + - do: + connector.delete: + connector_id: connector-b + + - do: + connector.delete: + connector_id: connector-c + + - do: + connector.list: { } + + - match: { count: 0 } + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml new file mode 100644 index 0000000000000..275936084144b --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/320_connector_delete.yml @@ -0,0 +1,34 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector-to-delete + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Delete Connector": + - do: + connector.delete: + connector_id: test-connector-to-delete + + - match: { acknowledged: true } + + - do: + catch: "missing" + connector.get: + connector_id: 
test-connector-to-delete + +--- +"Delete Connector - Connector does not exist": + - do: + catch: "missing" + connector.delete: + connector_id: test-nonexistent-connector diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 50b6410a624da..ce54474994acf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -42,8 +42,17 @@ import org.elasticsearch.xpack.application.analytics.ingest.AnalyticsEventIngestConfig; import org.elasticsearch.xpack.application.connector.ConnectorAPIFeature; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; +import org.elasticsearch.xpack.application.connector.action.DeleteConnectorAction; +import org.elasticsearch.xpack.application.connector.action.GetConnectorAction; +import org.elasticsearch.xpack.application.connector.action.ListConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestDeleteConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import 
org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -156,7 +165,14 @@ protected XPackLicenseState getLicenseState() { // Connectors if (ConnectorAPIFeature.isEnabled()) { - actionHandlers.add(new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class)); + actionHandlers.addAll( + List.of( + new ActionHandler<>(DeleteConnectorAction.INSTANCE, TransportDeleteConnectorAction.class), + new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), + new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), + new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class) + ) + ); } return Collections.unmodifiableList(actionHandlers); @@ -203,7 +219,14 @@ public List getRestHandlers( // Connectors if (ConnectorAPIFeature.isEnabled()) { - restHandlers.add(new RestPutConnectorAction()); + restHandlers.addAll( + List.of( + new RestDeleteConnectorAction(), + new RestGetConnectorAction(), + new RestListConnectorAction(), + new RestPutConnectorAction() + ) + ); } return Collections.unmodifiableList(restHandlers); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index db85b4a076906..d0787e62113ec 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -7,20 +7,33 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.time.Instant; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Represents a Connector in the Elasticsearch ecosystem. Connectors are used for integrating * and synchronizing external data sources with Elasticsearch. 
Each Connector instance encapsulates @@ -55,7 +68,7 @@ public class Connector implements Writeable, ToXContentObject { @Nullable private final Map configuration; // TODO: add explicit types @Nullable - private final ConnectorCustomSchedule customScheduling; + private final Map customScheduling; @Nullable private final String description; @Nullable @@ -112,7 +125,7 @@ private Connector( String connectorId, String apiKeyId, Map configuration, - ConnectorCustomSchedule customScheduling, + Map customScheduling, String description, String error, ConnectorFeatures features, @@ -153,8 +166,8 @@ private Connector( public Connector(StreamInput in) throws IOException { this.connectorId = in.readString(); this.apiKeyId = in.readOptionalString(); - this.configuration = in.readMap(StreamInput::readString, StreamInput::readGenericValue); - this.customScheduling = in.readOptionalWriteable(ConnectorCustomSchedule::new); + this.configuration = in.readMap(StreamInput::readGenericValue); + this.customScheduling = in.readMap(ConnectorCustomSchedule::new); this.description = in.readOptionalString(); this.error = in.readOptionalString(); this.features = in.readOptionalWriteable(ConnectorFeatures::new); @@ -172,25 +185,178 @@ public Connector(StreamInput in) throws IOException { this.syncNow = in.readBoolean(); } - private static final ParseField ID_FIELD = new ParseField("connector_id"); - private static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); - private static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); - private static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); - private static final ParseField DESCRIPTION_FIELD = new ParseField("description"); - private static final ParseField ERROR_FIELD = new ParseField("error"); - private static final ParseField FEATURES_FIELD = new ParseField("features"); - private static final ParseField FILTERING_FIELD = new ParseField("filtering"); - private static final 
ParseField INDEX_NAME_FIELD = new ParseField("index_name"); - private static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); - private static final ParseField LANGUAGE_FIELD = new ParseField("language"); - - private static final ParseField NAME_FIELD = new ParseField("name"); - private static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); - private static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); - private static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); - private static final ParseField STATUS_FIELD = new ParseField("status"); - private static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); - private static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); + static final ParseField ID_FIELD = new ParseField("connector_id"); + static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); + static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); + static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); + static final ParseField DESCRIPTION_FIELD = new ParseField("description"); + static final ParseField ERROR_FIELD = new ParseField("error"); + static final ParseField FEATURES_FIELD = new ParseField("features"); + static final ParseField FILTERING_FIELD = new ParseField("filtering"); + static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); + static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); + static final ParseField LANGUAGE_FIELD = new ParseField("language"); + static final ParseField NAME_FIELD = new ParseField("name"); + static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); + static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); + static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); + static final ParseField STATUS_FIELD = new ParseField("status"); + static final ParseField SYNC_CURSOR_FIELD 
= new ParseField("sync_cursor"); + static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("connector", true, (args) -> { + int i = 0; + return new Builder().setConnectorId((String) args[i++]) + .setApiKeyId((String) args[i++]) + .setConfiguration((Map) args[i++]) + .setCustomScheduling((Map) args[i++]) + .setDescription((String) args[i++]) + .setError((String) args[i++]) + .setFeatures((ConnectorFeatures) args[i++]) + .setFiltering((List) args[i++]) + .setIndexName((String) args[i++]) + .setIsNative((Boolean) args[i++]) + .setLanguage((String) args[i++]) + .setSyncInfo( + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + .setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSeen((Instant) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) args[i++]) + .build() + ) + .setName((String) args[i++]) + .setPipeline((ConnectorIngestPipeline) args[i++]) + .setScheduling((ConnectorScheduling) args[i++]) + .setServiceType((String) args[i++]) + .setStatus((ConnectorStatus) args[i++]) + .setSyncCursor(args[i++]) + .setSyncNow((Boolean) args[i]) + .build(); + }); + + static { + PARSER.declareString(constructorArg(), ID_FIELD); + PARSER.declareString(optionalConstructorArg(), API_KEY_ID_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parser.map(), + CONFIGURATION_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.map(HashMap::new, 
ConnectorCustomSchedule::fromXContent), + CUSTOM_SCHEDULING_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareString(optionalConstructorArg(), DESCRIPTION_FIELD); + PARSER.declareString(optionalConstructorArg(), ERROR_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorFeatures.fromXContent(p), + FEATURES_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ConnectorFiltering.fromXContent(p), FILTERING_FIELD); + PARSER.declareString(optionalConstructorArg(), INDEX_NAME_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), IS_NATIVE_FIELD); + PARSER.declareString(optionalConstructorArg(), LANGUAGE_FIELD); + + PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SEEN_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, + 
ObjectParser.ValueType.STRING + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNCED_FIELD, + ObjectParser.ValueType.STRING + ); + + PARSER.declareString(optionalConstructorArg(), NAME_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorIngestPipeline.fromXContent(p), + PIPELINE_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorScheduling.fromXContent(p), + SCHEDULING_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareString(optionalConstructorArg(), SERVICE_TYPE_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorStatus.connectorStatus(p.text()), + STATUS_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parser.map(), + SYNC_CURSOR_FIELD, + ObjectParser.ValueType.OBJECT_OR_NULL + ); + PARSER.declareBoolean(optionalConstructorArg(), SYNC_NOW_FIELD); + } + + public static Connector fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return Connector.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector document.", e); + } + } + + public static Connector fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { @@ -255,8 +421,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { 
out.writeString(connectorId); out.writeOptionalString(apiKeyId); - out.writeMap(configuration, StreamOutput::writeString, StreamOutput::writeGenericValue); - out.writeOptionalWriteable(customScheduling); + out.writeMap(configuration, StreamOutput::writeGenericValue); + out.writeMap(customScheduling, StreamOutput::writeWriteable); out.writeOptionalString(description); out.writeOptionalString(error); out.writeOptionalWriteable(features); @@ -334,7 +500,7 @@ public static class Builder { private String connectorId; private String apiKeyId; private Map configuration = Collections.emptyMap(); - private ConnectorCustomSchedule customScheduling; + private Map customScheduling = Collections.emptyMap(); private String description; private String error; private ConnectorFeatures features; @@ -366,7 +532,7 @@ public Builder setConfiguration(Map configuration) { return this; } - public Builder setCustomScheduling(ConnectorCustomSchedule customScheduling) { + public Builder setCustomScheduling(Map customScheduling) { this.customScheduling = customScheduling; return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java index 081a0245f9279..81239610c3186 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorCustomSchedule.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -17,6 +20,8 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.scheduler.Cron; import java.io.IOException; @@ -94,7 +99,12 @@ public ConnectorCustomSchedule(StreamInput in) throws IOException { ); PARSER.declareBoolean(constructorArg(), ENABLED_FIELD); PARSER.declareString(constructorArg(), INTERVAL_FIELD); - PARSER.declareString(optionalConstructorArg(), LAST_SYNCED_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNCED_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); PARSER.declareString(constructorArg(), NAME_FIELD); } @@ -102,6 +112,14 @@ public static ConnectorCustomSchedule fromXContent(XContentParser parser) throws return PARSER.parse(parser, null); } + public static ConnectorCustomSchedule fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorCustomSchedule.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector custom schedule.", e); + } + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java index b90b230381b8e..51aa110342fe9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFeatures.java @@ -7,35 +7,41 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +/** + * The {@link ConnectorFeatures} class represents feature flags for a connector. 
+ */ public class ConnectorFeatures implements Writeable, ToXContentObject { @Nullable - private final Boolean documentLevelSecurityEnabled; + private final FeatureEnabled documentLevelSecurityEnabled; @Nullable private final Boolean filteringAdvancedConfigEnabled; @Nullable private final Boolean filteringRulesEnabled; @Nullable - private final Boolean incrementalSyncEnabled; - @Nullable - private final Boolean syncRulesAdvancedEnabled; + private final FeatureEnabled incrementalSyncEnabled; @Nullable - private final Boolean syncRulesBasicEnabled; + private final SyncRulesFeatures syncRulesFeatures; /** * Constructs a new instance of ConnectorFeatures. @@ -43,77 +49,76 @@ public class ConnectorFeatures implements Writeable, ToXContentObject { * @param documentLevelSecurityEnabled A flag indicating whether document-level security is enabled. * @param filteringAdvancedConfig A flag indicating whether advanced filtering configuration is enabled. * @param filteringRules A flag indicating whether filtering rules are enabled. - * @param incrementalSyncEnabled A flag indicating whether incremental synchronization is enabled. - * @param syncRulesAdvancedEnabled A flag indicating whether advanced synchronization rules are enabled. - * @param syncRulesBasicEnabled A flag indicating whether basic synchronization rules are enabled. + * @param incrementalSyncEnabled A flag indicating whether incremental sync is enabled. + * @param syncRulesFeatures An {@link SyncRulesFeatures} object indicating whether basic and advanced sync rules are enabled. 
*/ private ConnectorFeatures( - Boolean documentLevelSecurityEnabled, + FeatureEnabled documentLevelSecurityEnabled, Boolean filteringAdvancedConfig, Boolean filteringRules, - Boolean incrementalSyncEnabled, - Boolean syncRulesAdvancedEnabled, - Boolean syncRulesBasicEnabled + FeatureEnabled incrementalSyncEnabled, + SyncRulesFeatures syncRulesFeatures ) { this.documentLevelSecurityEnabled = documentLevelSecurityEnabled; this.filteringAdvancedConfigEnabled = filteringAdvancedConfig; this.filteringRulesEnabled = filteringRules; this.incrementalSyncEnabled = incrementalSyncEnabled; - this.syncRulesAdvancedEnabled = syncRulesAdvancedEnabled; - this.syncRulesBasicEnabled = syncRulesBasicEnabled; + this.syncRulesFeatures = syncRulesFeatures; } public ConnectorFeatures(StreamInput in) throws IOException { - this.documentLevelSecurityEnabled = in.readOptionalBoolean(); + this.documentLevelSecurityEnabled = in.readOptionalWriteable(FeatureEnabled::new); this.filteringAdvancedConfigEnabled = in.readOptionalBoolean(); this.filteringRulesEnabled = in.readOptionalBoolean(); - this.incrementalSyncEnabled = in.readOptionalBoolean(); - this.syncRulesAdvancedEnabled = in.readOptionalBoolean(); - this.syncRulesBasicEnabled = in.readOptionalBoolean(); + this.incrementalSyncEnabled = in.readOptionalWriteable(FeatureEnabled::new); + this.syncRulesFeatures = in.readOptionalWriteable(SyncRulesFeatures::new); } private static final ParseField DOCUMENT_LEVEL_SECURITY_ENABLED_FIELD = new ParseField("document_level_security"); private static final ParseField FILTERING_ADVANCED_CONFIG_ENABLED_FIELD = new ParseField("filtering_advanced_config"); private static final ParseField FILTERING_RULES_ENABLED_FIELD = new ParseField("filtering_rules"); private static final ParseField INCREMENTAL_SYNC_ENABLED_FIELD = new ParseField("incremental_sync"); - private static final ParseField SYNC_RULES_ADVANCED_ENABLED_FIELD = new ParseField("advanced_sync_rules"); - private static final ParseField 
SYNC_RULES_BASIC_ENABLED_FIELD = new ParseField("basic_sync_rules"); + private static final ParseField SYNC_RULES_FIELD = new ParseField("sync_rules"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "connector_features", true, - args -> new Builder().setDocumentLevelSecurityEnabled((Boolean) args[0]) - .setFilteringAdvancedConfig((Boolean) args[1]) - .setFilteringRules((Boolean) args[2]) - .setIncrementalSyncEnabled((Boolean) args[3]) - .setSyncRulesAdvancedEnabled((Boolean) args[4]) - .setSyncRulesBasicEnabled((Boolean) args[5]) - .build() + args -> { + return new Builder().setDocumentLevelSecurityEnabled((FeatureEnabled) args[0]) + .setFilteringAdvancedConfig((Boolean) args[1]) + .setFilteringRules((Boolean) args[2]) + .setIncrementalSyncEnabled((FeatureEnabled) args[3]) + .setSyncRulesFeatures((SyncRulesFeatures) args[4]) + .build(); + } ); static { - PARSER.declareBoolean(optionalConstructorArg(), DOCUMENT_LEVEL_SECURITY_ENABLED_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> FeatureEnabled.fromXContent(p), DOCUMENT_LEVEL_SECURITY_ENABLED_FIELD); PARSER.declareBoolean(optionalConstructorArg(), FILTERING_ADVANCED_CONFIG_ENABLED_FIELD); PARSER.declareBoolean(optionalConstructorArg(), FILTERING_RULES_ENABLED_FIELD); - PARSER.declareBoolean(optionalConstructorArg(), INCREMENTAL_SYNC_ENABLED_FIELD); - PARSER.declareBoolean(optionalConstructorArg(), SYNC_RULES_ADVANCED_ENABLED_FIELD); - PARSER.declareBoolean(optionalConstructorArg(), SYNC_RULES_BASIC_ENABLED_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> FeatureEnabled.fromXContent(p), INCREMENTAL_SYNC_ENABLED_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> SyncRulesFeatures.fromXContent(p), SYNC_RULES_FIELD); } public static ConnectorFeatures fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } + public static ConnectorFeatures fromXContentBytes(BytesReference source, 
XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorFeatures.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector features.", e); + } + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { if (documentLevelSecurityEnabled != null) { - builder.startObject(DOCUMENT_LEVEL_SECURITY_ENABLED_FIELD.getPreferredName()); - { - builder.field("enabled", documentLevelSecurityEnabled); - } - builder.endObject(); + builder.field(DOCUMENT_LEVEL_SECURITY_ENABLED_FIELD.getPreferredName(), documentLevelSecurityEnabled); } if (filteringAdvancedConfigEnabled != null) { builder.field(FILTERING_ADVANCED_CONFIG_ENABLED_FIELD.getPreferredName(), filteringAdvancedConfigEnabled); @@ -122,30 +127,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(FILTERING_RULES_ENABLED_FIELD.getPreferredName(), filteringRulesEnabled); } if (incrementalSyncEnabled != null) { - builder.startObject(INCREMENTAL_SYNC_ENABLED_FIELD.getPreferredName()); - { - builder.field("enabled", incrementalSyncEnabled); - } - builder.endObject(); + builder.field(INCREMENTAL_SYNC_ENABLED_FIELD.getPreferredName(), incrementalSyncEnabled); } - builder.startObject("sync_rules"); - { - if (syncRulesAdvancedEnabled != null) { - builder.startObject("advanced"); - { - builder.field("enabled", syncRulesAdvancedEnabled); - } - builder.endObject(); - } - if (syncRulesBasicEnabled != null) { - builder.startObject("basic"); - { - builder.field("enabled", syncRulesBasicEnabled); - } - builder.endObject(); - } + if (syncRulesFeatures != null) { + builder.field(SYNC_RULES_FIELD.getPreferredName(), syncRulesFeatures); } - builder.endObject(); } builder.endObject(); return builder; @@ -153,25 +139,23 @@ public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalBoolean(documentLevelSecurityEnabled); + out.writeOptionalWriteable(documentLevelSecurityEnabled); out.writeOptionalBoolean(filteringAdvancedConfigEnabled); out.writeOptionalBoolean(filteringRulesEnabled); - out.writeOptionalBoolean(incrementalSyncEnabled); - out.writeOptionalBoolean(syncRulesAdvancedEnabled); - out.writeOptionalBoolean(syncRulesBasicEnabled); + out.writeOptionalWriteable(incrementalSyncEnabled); + out.writeOptionalWriteable(syncRulesFeatures); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ConnectorFeatures that = (ConnectorFeatures) o; - return Objects.equals(documentLevelSecurityEnabled, that.documentLevelSecurityEnabled) - && Objects.equals(filteringAdvancedConfigEnabled, that.filteringAdvancedConfigEnabled) - && Objects.equals(filteringRulesEnabled, that.filteringRulesEnabled) - && Objects.equals(incrementalSyncEnabled, that.incrementalSyncEnabled) - && Objects.equals(syncRulesAdvancedEnabled, that.syncRulesAdvancedEnabled) - && Objects.equals(syncRulesBasicEnabled, that.syncRulesBasicEnabled); + ConnectorFeatures features = (ConnectorFeatures) o; + return Objects.equals(documentLevelSecurityEnabled, features.documentLevelSecurityEnabled) + && Objects.equals(filteringAdvancedConfigEnabled, features.filteringAdvancedConfigEnabled) + && Objects.equals(filteringRulesEnabled, features.filteringRulesEnabled) + && Objects.equals(incrementalSyncEnabled, features.incrementalSyncEnabled) + && Objects.equals(syncRulesFeatures, features.syncRulesFeatures); } @Override @@ -181,21 +165,19 @@ public int hashCode() { filteringAdvancedConfigEnabled, filteringRulesEnabled, incrementalSyncEnabled, - syncRulesAdvancedEnabled, - syncRulesBasicEnabled + syncRulesFeatures ); } public static class Builder { - private Boolean 
documentLevelSecurityEnabled; + private FeatureEnabled documentLevelSecurityEnabled; private Boolean filteringAdvancedConfig; private Boolean filteringRules; - private Boolean incrementalSyncEnabled; - private Boolean syncRulesAdvancedEnabled; - private Boolean syncRulesBasicEnabled; + private FeatureEnabled incrementalSyncEnabled; + private SyncRulesFeatures syncRulesFeatures; - public Builder setDocumentLevelSecurityEnabled(Boolean documentLevelSecurityEnabled) { + public Builder setDocumentLevelSecurityEnabled(FeatureEnabled documentLevelSecurityEnabled) { this.documentLevelSecurityEnabled = documentLevelSecurityEnabled; return this; } @@ -210,18 +192,13 @@ public Builder setFilteringRules(Boolean filteringRules) { return this; } - public Builder setIncrementalSyncEnabled(Boolean incrementalSyncEnabled) { + public Builder setIncrementalSyncEnabled(FeatureEnabled incrementalSyncEnabled) { this.incrementalSyncEnabled = incrementalSyncEnabled; return this; } - public Builder setSyncRulesAdvancedEnabled(Boolean syncRulesAdvancedEnabled) { - this.syncRulesAdvancedEnabled = syncRulesAdvancedEnabled; - return this; - } - - public Builder setSyncRulesBasicEnabled(Boolean syncRulesBasicEnabled) { - this.syncRulesBasicEnabled = syncRulesBasicEnabled; + public Builder setSyncRulesFeatures(SyncRulesFeatures syncRulesFeatures) { + this.syncRulesFeatures = syncRulesFeatures; return this; } @@ -231,9 +208,169 @@ public ConnectorFeatures build() { filteringAdvancedConfig, filteringRules, incrementalSyncEnabled, - syncRulesAdvancedEnabled, - syncRulesBasicEnabled + syncRulesFeatures ); } } + + /** + * The {@link FeatureEnabled} class serves as a helper for serializing and deserializing + * feature representations within the Connector context. This class specifically addresses + * the handling of features represented in a nested JSON structure: + * + *
+     *     "my_feature": {"enabled": true}
+     * 
+ */ + public static class FeatureEnabled implements ToXContentObject, Writeable { + + private final boolean enabled; + + public FeatureEnabled(boolean enabled) { + this.enabled = enabled; + } + + public FeatureEnabled(StreamInput in) throws IOException { + this.enabled = in.readBoolean(); + } + + private static final ParseField ENABLED_FIELD = new ParseField("enabled"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_feature_enabled", + true, + args -> new FeatureEnabled((boolean) args[0]) + ); + + static { + PARSER.declareBoolean(optionalConstructorArg(), ENABLED_FIELD); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ENABLED_FIELD.getPreferredName(), enabled); + } + builder.endObject(); + return builder; + } + + public static FeatureEnabled fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(enabled); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FeatureEnabled that = (FeatureEnabled) o; + return enabled == that.enabled; + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + } + + /** + * The {@link SyncRulesFeatures} class represents the feature configuration for advanced and basic + * sync rules in a structured and serializable format. 
+ */ + public static class SyncRulesFeatures implements ToXContentObject, Writeable { + + private final FeatureEnabled syncRulesAdvancedEnabled; + private final FeatureEnabled syncRulesBasicEnabled; + + private SyncRulesFeatures(FeatureEnabled syncRulesAdvancedEnabled, FeatureEnabled syncRulesBasicEnabled) { + this.syncRulesAdvancedEnabled = syncRulesAdvancedEnabled; + this.syncRulesBasicEnabled = syncRulesBasicEnabled; + } + + public SyncRulesFeatures(StreamInput in) throws IOException { + this.syncRulesAdvancedEnabled = in.readOptionalWriteable(FeatureEnabled::new); + this.syncRulesBasicEnabled = in.readOptionalWriteable(FeatureEnabled::new); + } + + private static final ParseField SYNC_RULES_ADVANCED_ENABLED_FIELD = new ParseField("advanced"); + private static final ParseField SYNC_RULES_BASIC_ENABLED_FIELD = new ParseField("basic"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "sync_rules_features", + true, + args -> new Builder().setSyncRulesAdvancedEnabled((FeatureEnabled) args[0]) + .setSyncRulesBasicEnabled((FeatureEnabled) args[1]) + .build() + ); + + static { + PARSER.declareObject(optionalConstructorArg(), (p, c) -> FeatureEnabled.fromXContent(p), SYNC_RULES_ADVANCED_ENABLED_FIELD); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> FeatureEnabled.fromXContent(p), SYNC_RULES_BASIC_ENABLED_FIELD); + } + + public static SyncRulesFeatures fromXContent(XContentParser p) throws IOException { + return PARSER.parse(p, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (syncRulesAdvancedEnabled != null) { + builder.field(SYNC_RULES_ADVANCED_ENABLED_FIELD.getPreferredName(), syncRulesAdvancedEnabled); + } + if (syncRulesBasicEnabled != null) { + builder.field(SYNC_RULES_BASIC_ENABLED_FIELD.getPreferredName(), syncRulesBasicEnabled); + } + } + builder.endObject(); + return builder; + } + + @Override + 
public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(syncRulesAdvancedEnabled); + out.writeOptionalWriteable(syncRulesBasicEnabled); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SyncRulesFeatures that = (SyncRulesFeatures) o; + return Objects.equals(syncRulesAdvancedEnabled, that.syncRulesAdvancedEnabled) + && Objects.equals(syncRulesBasicEnabled, that.syncRulesBasicEnabled); + } + + @Override + public int hashCode() { + return Objects.hash(syncRulesAdvancedEnabled, syncRulesBasicEnabled); + } + + public static class Builder { + + private FeatureEnabled syncRulesAdvancedEnabled; + private FeatureEnabled syncRulesBasicEnabled; + + public Builder setSyncRulesAdvancedEnabled(FeatureEnabled syncRulesAdvancedEnabled) { + this.syncRulesAdvancedEnabled = syncRulesAdvancedEnabled; + return this; + } + + public Builder setSyncRulesBasicEnabled(FeatureEnabled syncRulesBasicEnabled) { + this.syncRulesBasicEnabled = syncRulesBasicEnabled; + return this; + } + + public SyncRulesFeatures build() { + return new SyncRulesFeatures(syncRulesAdvancedEnabled, syncRulesBasicEnabled); + } + } + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java index b20970e80381c..8ade6cdbcc0b1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorFiltering.java @@ -7,20 +7,40 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.filtering.FilteringAdvancedSnippet; +import org.elasticsearch.xpack.application.connector.filtering.FilteringPolicy; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRuleCondition; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import java.io.IOException; import java.time.Instant; import java.util.Collections; import java.util.List; -import java.util.Locale; -import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents filtering configurations for a connector, encapsulating both active and draft rules. + * The {@link ConnectorFiltering} class stores the current active filtering rules, a domain associated + * with these rules, and any draft filtering rules that are yet to be applied. 
+ */ public class ConnectorFiltering implements Writeable, ToXContentObject { private final FilteringRules active; @@ -46,18 +66,49 @@ public ConnectorFiltering(StreamInput in) throws IOException { this.draft = new FilteringRules(in); } + private static final ParseField ACTIVE_FIELD = new ParseField("active"); + private static final ParseField DOMAIN_FIELD = new ParseField("domain"); + private static final ParseField DRAFT_FIELD = new ParseField("draft"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_filtering", + true, + args -> new ConnectorFiltering.Builder().setActive((FilteringRules) args[0]) + .setDomain((String) args[1]) + .setDraft((FilteringRules) args[2]) + .build() + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> FilteringRules.fromXContent(p), ACTIVE_FIELD); + PARSER.declareString(constructorArg(), DOMAIN_FIELD); + PARSER.declareObject(constructorArg(), (p, c) -> FilteringRules.fromXContent(p), DRAFT_FIELD); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field("active", active); - builder.field("domain", domain); - builder.field("draft", draft); + builder.field(ACTIVE_FIELD.getPreferredName(), active); + builder.field(DOMAIN_FIELD.getPreferredName(), domain); + builder.field(DRAFT_FIELD.getPreferredName(), draft); } builder.endObject(); return builder; } + public static ConnectorFiltering fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static ConnectorFiltering fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorFiltering.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector filtering.", e); + } + } + 
@Override public void writeTo(StreamOutput out) throws IOException { active.writeTo(out); @@ -104,448 +155,17 @@ public ConnectorFiltering build() { } } - public static class FilteringRules implements Writeable, ToXContentObject { - - private final Instant advancedSnippetCreatedAt; - private final Instant advancedSnippetUpdatedAt; - private final Map advancedSnippetValue; - private final List rules; - private final List validationErrors; - private final FilteringValidationState validationState; - - /** - * Constructs a new FilteringRules instance. - * - * @param advancedSnippetCreatedAt The creation timestamp of the advanced snippet. - * @param advancedSnippetUpdatedAt The update timestamp of the advanced snippet. - * @param advancedSnippetValue The map of the advanced snippet. - * @param rules The list of {@link FilteringRule} objects - * @param validationErrors The list of {@link FilteringValidation} errors for the filtering rules. - * @param validationState The {@link FilteringValidationState} of the filtering rules. 
- */ - public FilteringRules( - Instant advancedSnippetCreatedAt, - Instant advancedSnippetUpdatedAt, - Map advancedSnippetValue, - List rules, - List validationErrors, - FilteringValidationState validationState - ) { - this.advancedSnippetCreatedAt = advancedSnippetCreatedAt; - this.advancedSnippetUpdatedAt = advancedSnippetUpdatedAt; - this.advancedSnippetValue = advancedSnippetValue; - this.rules = rules; - this.validationErrors = validationErrors; - this.validationState = validationState; - } - - public FilteringRules(StreamInput in) throws IOException { - this.advancedSnippetCreatedAt = in.readInstant(); - this.advancedSnippetUpdatedAt = in.readInstant(); - this.advancedSnippetValue = in.readMap(StreamInput::readString, StreamInput::readGenericValue); - this.rules = in.readCollectionAsList(FilteringRule::new); - this.validationErrors = in.readCollectionAsList(FilteringValidation::new); - this.validationState = in.readEnum(FilteringValidationState.class); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - builder.startObject("advanced_snippet"); - { - builder.field("created_at", advancedSnippetCreatedAt); - builder.field("updated_at", advancedSnippetUpdatedAt); - builder.field("value", advancedSnippetValue); - } - builder.endObject(); - - builder.startArray("rules"); - for (FilteringRule rule : rules) { - rule.toXContent(builder, params); - } - builder.endArray(); - - builder.startObject("validation"); - { - builder.startArray("errors"); - for (FilteringValidation error : validationErrors) { - error.toXContent(builder, params); - } - builder.endArray(); - builder.field("state", validationState.toString()); - } - builder.endObject(); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeInstant(advancedSnippetCreatedAt); - out.writeInstant(advancedSnippetUpdatedAt); - 
out.writeMap(advancedSnippetValue, StreamOutput::writeString, StreamOutput::writeGenericValue); - out.writeCollection(rules); - out.writeCollection(validationErrors); - out.writeEnum(validationState); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FilteringRules that = (FilteringRules) o; - return Objects.equals(advancedSnippetCreatedAt, that.advancedSnippetCreatedAt) - && Objects.equals(advancedSnippetUpdatedAt, that.advancedSnippetUpdatedAt) - && Objects.equals(advancedSnippetValue, that.advancedSnippetValue) - && Objects.equals(rules, that.rules) - && Objects.equals(validationErrors, that.validationErrors) - && validationState == that.validationState; - } - - @Override - public int hashCode() { - return Objects.hash( - advancedSnippetCreatedAt, - advancedSnippetUpdatedAt, - advancedSnippetValue, - rules, - validationErrors, - validationState - ); - } - - public static class Builder { - - private Instant advancedSnippetCreatedAt; - private Instant advancedSnippetUpdatedAt; - private Map advancedSnippetValue; - private List rules; - private List validationErrors; - private FilteringValidationState validationState; - - public Builder setAdvancedSnippetCreatedAt(Instant advancedSnippetCreatedAt) { - this.advancedSnippetCreatedAt = advancedSnippetCreatedAt; - return this; - } - - public Builder setAdvancedSnippetUpdatedAt(Instant advancedSnippetUpdatedAt) { - this.advancedSnippetUpdatedAt = advancedSnippetUpdatedAt; - return this; - } - - public Builder setAdvancedSnippetValue(Map advancedSnippetValue) { - this.advancedSnippetValue = advancedSnippetValue; - return this; - } - - public Builder setRules(List rules) { - this.rules = rules; - return this; - } - - public Builder setValidationErrors(List validationErrors) { - this.validationErrors = validationErrors; - return this; - } - - public Builder setValidationState(FilteringValidationState validationState) { - 
this.validationState = validationState; - return this; - } - - public FilteringRules build() { - return new FilteringRules( - advancedSnippetCreatedAt, - advancedSnippetUpdatedAt, - advancedSnippetValue, - rules, - validationErrors, - validationState - ); - } - } - } - - public static class FilteringRule implements Writeable, ToXContentObject { - - private final Instant createdAt; - private final String field; - private final String id; - private final Integer order; - private final FilteringPolicy policy; - private final FilteringRuleCondition rule; - private final Instant updatedAt; - private final String value; - - /** - * Constructs a new FilteringRule instance. - * - * @param createdAt The creation timestamp of the filtering rule. - * @param field The field associated with the filtering rule. - * @param id The identifier of the filtering rule. - * @param order The order of the filtering rule. - * @param policy The {@link FilteringPolicy} of the filtering rule. - * @param rule The specific {@link FilteringRuleCondition} - * @param updatedAt The update timestamp of the filtering rule. - * @param value The value associated with the filtering rule. 
- */ - public FilteringRule( - Instant createdAt, - String field, - String id, - Integer order, - FilteringPolicy policy, - FilteringRuleCondition rule, - Instant updatedAt, - String value - ) { - this.createdAt = createdAt; - this.field = field; - this.id = id; - this.order = order; - this.policy = policy; - this.rule = rule; - this.updatedAt = updatedAt; - this.value = value; - } - - public FilteringRule(StreamInput in) throws IOException { - this.createdAt = in.readInstant(); - this.field = in.readString(); - this.id = in.readString(); - this.order = in.readInt(); - this.policy = in.readEnum(FilteringPolicy.class); - this.rule = in.readEnum(FilteringRuleCondition.class); - this.updatedAt = in.readInstant(); - this.value = in.readString(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("createdAt", createdAt); - builder.field("field", field); - builder.field("id", id); - builder.field("order", order); - builder.field("policy", policy.toString()); - builder.field("rule", rule.toString()); - builder.field("updatedAt", updatedAt); - builder.field("value", value); - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeInstant(createdAt); - out.writeString(field); - out.writeString(id); - out.writeInt(order); - out.writeEnum(policy); - out.writeEnum(rule); - out.writeInstant(updatedAt); - out.writeString(value); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FilteringRule that = (FilteringRule) o; - return Objects.equals(createdAt, that.createdAt) - && Objects.equals(field, that.field) - && Objects.equals(id, that.id) - && Objects.equals(order, that.order) - && policy == that.policy - && rule == that.rule - && Objects.equals(updatedAt, that.updatedAt) - && Objects.equals(value, 
that.value); - } - - @Override - public int hashCode() { - return Objects.hash(createdAt, field, id, order, policy, rule, updatedAt, value); - } - - public static class Builder { - - private Instant createdAt; - private String field; - private String id; - private Integer order; - private FilteringPolicy policy; - private FilteringRuleCondition rule; - private Instant updatedAt; - private String value; - - public Builder setCreatedAt(Instant createdAt) { - this.createdAt = createdAt; - return this; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public Builder setOrder(Integer order) { - this.order = order; - return this; - } - - public Builder setPolicy(FilteringPolicy policy) { - this.policy = policy; - return this; - } - - public Builder setRule(FilteringRuleCondition rule) { - this.rule = rule; - return this; - } - - public Builder setUpdatedAt(Instant updatedAt) { - this.updatedAt = updatedAt; - return this; - } - - public Builder setValue(String value) { - this.value = value; - return this; - } - - public FilteringRule build() { - return new FilteringRule(createdAt, field, id, order, policy, rule, updatedAt, value); - } - } - } - - public static class FilteringValidation implements Writeable, ToXContentObject { - private final List ids; - private final List messages; - - /** - * Constructs a new FilteringValidation instance. - * - * @param ids The list of identifiers associated with the validation. - * @param messages The list of messages describing the validation results. 
- */ - public FilteringValidation(List ids, List messages) { - this.ids = ids; - this.messages = messages; - } - - public FilteringValidation(StreamInput in) throws IOException { - this.ids = in.readStringCollectionAsList(); - this.messages = in.readStringCollectionAsList(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - builder.stringListField("ids", ids); - builder.stringListField("messages", messages); - } - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeStringCollection(ids); - out.writeStringCollection(messages); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FilteringValidation that = (FilteringValidation) o; - return Objects.equals(ids, that.ids) && Objects.equals(messages, that.messages); - } - - @Override - public int hashCode() { - return Objects.hash(ids, messages); - } - - public static class Builder { - - private List ids; - private List messages; - - public Builder setIds(List ids) { - this.ids = ids; - return this; - } - - public Builder setMessages(List messages) { - this.messages = messages; - return this; - } - - public FilteringValidation build() { - return new FilteringValidation(ids, messages); - } - } - } - - public enum FilteringValidationState { - EDITED, - INVALID, - VALID; - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public enum FilteringPolicy { - EXCLUDE, - INCLUDE; - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public enum FilteringRuleCondition { - CONTAINS("contains"), - ENDS_WITH("ends_with"), - EQUALS("equals"), - GT(">"), - LT("<"), - REGEX("regex"), - STARTS_WITH("starts_with"); - - private final String value; - - FilteringRuleCondition(String value) { - this.value = value; - } 
- - @Override - public String toString() { - return this.value; - } - } - public static ConnectorFiltering getDefaultConnectorFilteringConfig() { Instant currentTimestamp = Instant.now(); return new ConnectorFiltering.Builder().setActive( - new FilteringRules.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) - .setAdvancedSnippetUpdatedAt(currentTimestamp) - .setAdvancedSnippetValue(Collections.emptyMap()) + new FilteringRules.Builder().setAdvancedSnippet( + new FilteringAdvancedSnippet.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) + .setAdvancedSnippetUpdatedAt(currentTimestamp) + .setAdvancedSnippetValue(Collections.emptyMap()) + .build() + ) .setRules( List.of( new FilteringRule.Builder().setCreatedAt(currentTimestamp) @@ -559,15 +179,21 @@ public static ConnectorFiltering getDefaultConnectorFilteringConfig() { .build() ) ) - .setValidationErrors(Collections.emptyList()) - .setValidationState(FilteringValidationState.VALID) + .setFilteringValidationInfo( + new FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) + .setValidationState(FilteringValidationState.VALID) + .build() + ) .build() ) .setDomain("DEFAULT") .setDraft( - new FilteringRules.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) - .setAdvancedSnippetUpdatedAt(currentTimestamp) - .setAdvancedSnippetValue(Collections.emptyMap()) + new FilteringRules.Builder().setAdvancedSnippet( + new FilteringAdvancedSnippet.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) + .setAdvancedSnippetUpdatedAt(currentTimestamp) + .setAdvancedSnippetValue(Collections.emptyMap()) + .build() + ) .setRules( List.of( new FilteringRule.Builder().setCreatedAt(currentTimestamp) @@ -581,8 +207,11 @@ public static ConnectorFiltering getDefaultConnectorFilteringConfig() { .build() ) ) - .setValidationErrors(Collections.emptyList()) - .setValidationState(FilteringValidationState.VALID) + .setFilteringValidationInfo( + new 
FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) + .setValidationState(FilteringValidationState.VALID) + .build() + ) .build() ) .build(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 0ae971d161b17..a8c9749d3fbc1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -7,14 +7,33 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentType; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; 
+import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -52,4 +71,145 @@ public void putConnector(Connector connector, ActionListener l listener.onFailure(e); } } + + /** + * Gets the {@link Connector} from the underlying index. + * + * @param connectorId The id of the connector object. + * @param listener The action listener to invoke on response/failure. + */ + public void getConnector(String connectorId, ActionListener listener) { + try { + final GetRequest getRequest = new GetRequest(CONNECTOR_INDEX_NAME).id(connectorId).realtime(true); + + clientWithOrigin.get(getRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, getResponse) -> { + if (getResponse.isExists() == false) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + try { + final Connector connector = Connector.fromXContentBytes(getResponse.getSourceAsBytesRef(), XContentType.JSON); + l.onResponse(connector); + } catch (Exception e) { + listener.onFailure(e); + } + })); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Deletes the {@link Connector} in the underlying index. + * + * @param connectorId The id of the connector object. + * @param listener The action listener to invoke on response/failure. 
+ */ + public void deleteConnector(String connectorId, ActionListener listener) { + + final DeleteRequest deleteRequest = new DeleteRequest(CONNECTOR_INDEX_NAME).id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + try { + clientWithOrigin.delete( + deleteRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, deleteResponse) -> { + if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(deleteResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + + } + + /** + * List the {@link Connector} in ascending order of their ids. + * + * @param from From index to start the search from. + * @param size The maximum number of {@link Connector}s to return. + * @param listener The action listener to invoke on response/failure. + */ + public void listConnectors(int from, int size, ActionListener listener) { + try { + final SearchSourceBuilder source = new SearchSourceBuilder().from(from) + .size(size) + .query(new MatchAllQueryBuilder()) + .fetchSource(true) + .sort(Connector.ID_FIELD.getPreferredName(), SortOrder.ASC); + final SearchRequest req = new SearchRequest(CONNECTOR_INDEX_NAME).source(source); + clientWithOrigin.search(req, new ActionListener<>() { + @Override + public void onResponse(SearchResponse searchResponse) { + try { + listener.onResponse(mapSearchResponseToConnectorList(searchResponse)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(new ConnectorIndexService.ConnectorResult(Collections.emptyList(), 0L)); + return; + } + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { + final List connectorResults = 
Arrays.stream(response.getHits().getHits()) + .map(ConnectorIndexService::hitToConnector) + .toList(); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + } + + private static Connector hitToConnector(SearchHit searchHit) { + + // todo: don't return sensitive data from configuration in list endpoint + + return Connector.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + } + + public record ConnectorResult(List connectors, long totalResults) {} + + /** + * Listeners that checks failures for IndexNotFoundException, and transforms them in ResourceNotFoundException, + * invoking onFailure on the delegate listener + */ + static class DelegatingIndexNotFoundActionListener extends DelegatingActionListener { + + private final BiConsumer, T> bc; + private final String connectorId; + + DelegatingIndexNotFoundActionListener(String connectorId, ActionListener delegate, BiConsumer, T> bc) { + super(delegate); + this.bc = bc; + this.connectorId = connectorId; + } + + @Override + public void onResponse(T t) { + bc.accept(delegate, t); + } + + @Override + public void onFailure(Exception e) { + Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof IndexNotFoundException) { + delegate.onFailure(new ResourceNotFoundException("connector [" + connectorId + "] not found")); + return; + } + delegate.onFailure(e); + } + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipeline.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipeline.java index 8b94624ee981c..620e19968309d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipeline.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipeline.java @@ -7,16 +7,25 @@ package 
org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class ConnectorIngestPipeline implements Writeable, ToXContentObject { private final Boolean extractBinaryContent; @@ -51,6 +60,35 @@ public ConnectorIngestPipeline(StreamInput in) throws IOException { private static final ParseField REDUCE_WHITESPACE_FIELD = new ParseField("reduce_whitespace"); private static final ParseField RUN_ML_INFERENCE_FIELD = new ParseField("run_ml_inference"); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_ingest_pipeline", + true, + args -> new Builder().setExtractBinaryContent((Boolean) args[0]) + .setName((String) args[1]) + .setReduceWhitespace((Boolean) args[2]) + .setRunMlInference((Boolean) args[3]) + .build() + ); + + static { + PARSER.declareBoolean(constructorArg(), EXTRACT_BINARY_CONTENT_FIELD); + PARSER.declareString(constructorArg(), NAME_FIELD); + PARSER.declareBoolean(constructorArg(), REDUCE_WHITESPACE_FIELD); + PARSER.declareBoolean(constructorArg(), RUN_ML_INFERENCE_FIELD); + } + + public static ConnectorIngestPipeline fromXContentBytes(BytesReference source, XContentType xContentType) 
{ + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorIngestPipeline.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static ConnectorIngestPipeline fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorListItem.java deleted file mode 100644 index 22dd8d2ae3f22..0000000000000 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorListItem.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.application.connector; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * This class is used for returning information for lists of connectors, to avoid including all - * {@link Connector} information which can be retrieved using subsequent GetConnector requests. 
- */ -public class ConnectorListItem implements Writeable, ToXContentObject { - - private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); - private static final ParseField NAME_FIELD = new ParseField("name"); - - private final String connectorId; - - @Nullable - private final String name; - - public ConnectorListItem(String connectorId, @Nullable String name) { - this.connectorId = connectorId; - this.name = name; - } - - public ConnectorListItem(StreamInput in) throws IOException { - this.connectorId = in.readString(); - this.name = in.readOptionalString(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); - if (name != null) { - builder.field(NAME_FIELD.getPreferredName(), name); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(connectorId); - out.writeOptionalString(name); - } -} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStatus.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStatus.java index b994ccc89c1d9..5ebbab668890b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStatus.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorStatus.java @@ -30,4 +30,13 @@ public enum ConnectorStatus { public String toString() { return name().toLowerCase(Locale.ROOT); } + + public static ConnectorStatus connectorStatus(String status) { + for (ConnectorStatus connectorStatus : ConnectorStatus.values()) { + if (connectorStatus.name().equalsIgnoreCase(status)) { + return connectorStatus; + } + } + throw new IllegalArgumentException("Unknown ConnectorStatus: " + status); 
+ } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 2cc158fdd5f64..10a2d54e29300 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -96,17 +96,17 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { this.lastSynced = in.readOptionalInstant(); } - private static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); - private static final ParseField LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); - private static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); - private static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); - private static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); - private static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); - private static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); - private static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); - private static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); - private static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); - private static final ParseField LAST_SYNCED_FIELD = new ParseField("last_synced"); + static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); + static final ParseField 
LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); + static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); + static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); + static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); + static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); + static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); + static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); + static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); + static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); + static final ParseField LAST_SYNCED_FIELD = new ParseField("last_synced"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java index 97ea570b4f866..b138fc864881a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java @@ -34,4 +34,13 @@ public enum ConnectorSyncStatus { public String toString() { return name().toLowerCase(Locale.ROOT); } + + public static ConnectorSyncStatus connectorSyncStatus(String status) { + for (ConnectorSyncStatus connectorSyncStatus : ConnectorSyncStatus.values()) { + if (connectorSyncStatus.name().equalsIgnoreCase(status)) { + return connectorSyncStatus; + } + } + throw new 
IllegalArgumentException("Unknown ConnectorSyncStatus: " + status); + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java new file mode 100644 index 0000000000000..fab57921772d9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorAction.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class DeleteConnectorAction extends ActionType { + + public static final DeleteConnectorAction INSTANCE = new DeleteConnectorAction(); + public static final String NAME = "cluster:admin/xpack/connector/delete"; + + private DeleteConnectorAction() { + 
super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + } + + public Request(String connectorId) { + this.connectorId = connectorId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("connector_id missing", validationException); + } + + return validationException; + } + + public String getConnectorId() { + return connectorId; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "delete_connector_request", + false, + (p) -> new Request((String) p[0]) + ); + static { + PARSER.declareString(constructorArg(), CONNECTOR_ID_FIELD); + } + + public static DeleteConnectorAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java new file mode 100644 index 0000000000000..61d5947489322 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class GetConnectorAction extends ActionType { + + public static final GetConnectorAction INSTANCE = new GetConnectorAction(); + public static final String NAME = "cluster:admin/xpack/connector/get"; + + private GetConnectorAction() { + super(NAME, GetConnectorAction.Response::new); + } + + public static 
class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + + public Request(String connectorId) { + this.connectorId = connectorId; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + } + + public String getConnectorId() { + return connectorId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("connector_id missing", validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_connector_request", + false, + (p) -> new Request((String) p[0]) + + ); + static { + PARSER.declareString(constructorArg(), CONNECTOR_ID_FIELD); + } + + public static Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final Connector connector; + + public Response(Connector connector) { + this.connector = connector; 
+ } + + public Response(StreamInput in) throws IOException { + super(in); + this.connector = new Connector(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + connector.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return connector.toXContent(builder, params); + } + + public static GetConnectorAction.Response fromXContent(XContentParser parser) throws IOException { + return new GetConnectorAction.Response(Connector.fromXContent(parser)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(connector, response.connector); + } + + @Override + public int hashCode() { + return Objects.hash(connector); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java new file mode 100644 index 0000000000000..70cee8b064c71 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.action.util.QueryPage; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class ListConnectorAction extends ActionType { + + public static final ListConnectorAction INSTANCE = new ListConnectorAction(); + public static final String NAME = "cluster:admin/xpack/connector/list"; + + public ListConnectorAction() { + super(NAME, ListConnectorAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final PageParams pageParams; + + private static final ParseField PAGE_PARAMS_FIELD = new ParseField("pageParams"); + + public Request(StreamInput in) throws IOException { + super(in); + this.pageParams = new PageParams(in); + } + + public Request(PageParams pageParams) { + this.pageParams = pageParams; + } + + public PageParams getPageParams() { + return pageParams; + } + + @Override + public ActionRequestValidationException validate() { + // Pagination validation is done as part of PageParams constructor + return null; + } + + @Override + public void writeTo(StreamOutput out) throws 
IOException { + super.writeTo(out); + pageParams.writeTo(out); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListConnectorAction.Request that = (ListConnectorAction.Request) o; + return Objects.equals(pageParams, that.pageParams); + } + + @Override + public int hashCode() { + return Objects.hash(pageParams); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "list_connector_request", + p -> new ListConnectorAction.Request((PageParams) p[0]) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> PageParams.fromXContent(p), PAGE_PARAMS_FIELD); + } + + public static ListConnectorAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); + builder.endObject(); + return builder; + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + public static final ParseField RESULT_FIELD = new ParseField("results"); + + final QueryPage queryPage; + + public Response(StreamInput in) throws IOException { + super(in); + this.queryPage = new QueryPage<>(in, Connector::new); + } + + public Response(List items, Long totalResults) { + this.queryPage = new QueryPage<>(items, totalResults, RESULT_FIELD); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryPage.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryPage.toXContent(builder, params); + } + + public QueryPage queryPage() { + return queryPage; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + ListConnectorAction.Response that = (ListConnectorAction.Response) o; + return queryPage.equals(that.queryPage); + } + + @Override + public int hashCode() { + return queryPage.hashCode(); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java new file mode 100644 index 0000000000000..02153710a99a0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestDeleteConnectorAction.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; + +public class RestDeleteConnectorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_delete_action"; + } + + @Override + public List routes() { + return List.of(new Route(DELETE, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + DeleteConnectorAction.Request request = new DeleteConnectorAction.Request(restRequest.param("connector_id")); + return channel -> 
client.execute(DeleteConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java new file mode 100644 index 0000000000000..50691bf4d5ea8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestGetConnectorAction.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +public class RestGetConnectorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_get_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + GetConnectorAction.Request request = new GetConnectorAction.Request(restRequest.param("connector_id")); + return channel -> client.execute(GetConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java new file mode 100644 index 0000000000000..59d984438ebf6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.core.action.util.PageParams; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +public class RestListConnectorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_list_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + int from = restRequest.paramAsInt("from", PageParams.DEFAULT_FROM); + int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE); + ListConnectorAction.Request request = new ListConnectorAction.Request(new PageParams(from, size)); + + return channel -> client.execute(ListConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } 
+} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java new file mode 100644 index 0000000000000..f83f340ec1ae7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportDeleteConnectorAction.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportDeleteConnectorAction extends HandledTransportAction { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportDeleteConnectorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + DeleteConnectorAction.NAME, + transportService, + actionFilters, + DeleteConnectorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + 
@Override + protected void doExecute(Task task, DeleteConnectorAction.Request request, ActionListener listener) { + String connectorId = request.getConnectorId(); + connectorIndexService.deleteConnector(connectorId, listener.map(v -> AcknowledgedResponse.TRUE)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportGetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportGetConnectorAction.java new file mode 100644 index 0000000000000..44359ac55d5d0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportGetConnectorAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportGetConnectorAction extends HandledTransportAction { + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportGetConnectorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + GetConnectorAction.NAME, + transportService, + actionFilters, + GetConnectorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute(Task task, GetConnectorAction.Request request, ActionListener listener) { + connectorIndexService.getConnector(request.getConnectorId(), listener.map(GetConnectorAction.Response::new)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java new file mode 100644 index 0000000000000..cfe05965da37b --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; +import org.elasticsearch.xpack.core.action.util.PageParams; + +public class TransportListConnectorAction extends HandledTransportAction { + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportListConnectorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + ListConnectorAction.NAME, + transportService, + actionFilters, + ListConnectorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute(Task task, ListConnectorAction.Request request, ActionListener listener) { + final PageParams pageParams = request.getPageParams(); + connectorIndexService.listConnectors( + pageParams.getFrom(), + pageParams.getSize(), + listener.map(r -> new ListConnectorAction.Response(r.connectors(), r.totalResults())) + ); + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java new file mode 100644 index 0000000000000..ca7d3bfa6d9c8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringAdvancedSnippet.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.time.Instant; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents an advanced snippet used in filtering processes, providing detailed criteria or rules. + * This class includes timestamps for the creation and last update of the snippet, along with the + * actual snippet content represented as a map. + */ +public class FilteringAdvancedSnippet implements Writeable, ToXContentObject { + + private final Instant advancedSnippetCreatedAt; + private final Instant advancedSnippetUpdatedAt; + private final Map advancedSnippetValue; + + /** + * @param advancedSnippetCreatedAt The creation timestamp of the advanced snippet. 
+ * @param advancedSnippetUpdatedAt The update timestamp of the advanced snippet. + * @param advancedSnippetValue The value of the advanced snippet. + */ + private FilteringAdvancedSnippet( + Instant advancedSnippetCreatedAt, + Instant advancedSnippetUpdatedAt, + Map advancedSnippetValue + ) { + this.advancedSnippetCreatedAt = advancedSnippetCreatedAt; + this.advancedSnippetUpdatedAt = advancedSnippetUpdatedAt; + this.advancedSnippetValue = advancedSnippetValue; + } + + public FilteringAdvancedSnippet(StreamInput in) throws IOException { + this.advancedSnippetCreatedAt = in.readInstant(); + this.advancedSnippetUpdatedAt = in.readInstant(); + this.advancedSnippetValue = in.readMap(StreamInput::readString, StreamInput::readGenericValue); + } + + private static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_filtering_advanced_snippet", + true, + args -> new Builder().setAdvancedSnippetCreatedAt((Instant) args[0]) + .setAdvancedSnippetUpdatedAt((Instant) args[1]) + .setAdvancedSnippetValue((Map) args[2]) + .build() + ); + + static { + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), UPDATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> p.map(), VALUE_FIELD, ObjectParser.ValueType.OBJECT); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(CREATED_AT_FIELD.getPreferredName(), advancedSnippetCreatedAt); + builder.field(UPDATED_AT_FIELD.getPreferredName(), 
advancedSnippetUpdatedAt); + builder.field(VALUE_FIELD.getPreferredName(), advancedSnippetValue); + } + builder.endObject(); + return builder; + } + + public static FilteringAdvancedSnippet fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInstant(advancedSnippetCreatedAt); + out.writeInstant(advancedSnippetUpdatedAt); + out.writeMap(advancedSnippetValue, StreamOutput::writeString, StreamOutput::writeGenericValue); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilteringAdvancedSnippet that = (FilteringAdvancedSnippet) o; + return Objects.equals(advancedSnippetCreatedAt, that.advancedSnippetCreatedAt) + && Objects.equals(advancedSnippetUpdatedAt, that.advancedSnippetUpdatedAt) + && Objects.equals(advancedSnippetValue, that.advancedSnippetValue); + } + + @Override + public int hashCode() { + return Objects.hash(advancedSnippetCreatedAt, advancedSnippetUpdatedAt, advancedSnippetValue); + } + + public static class Builder { + + private Instant advancedSnippetCreatedAt; + private Instant advancedSnippetUpdatedAt; + private Map advancedSnippetValue; + + public Builder setAdvancedSnippetCreatedAt(Instant advancedSnippetCreatedAt) { + this.advancedSnippetCreatedAt = advancedSnippetCreatedAt; + return this; + } + + public Builder setAdvancedSnippetUpdatedAt(Instant advancedSnippetUpdatedAt) { + this.advancedSnippetUpdatedAt = advancedSnippetUpdatedAt; + return this; + } + + public Builder setAdvancedSnippetValue(Map advancedSnippetValue) { + this.advancedSnippetValue = advancedSnippetValue; + return this; + } + + public FilteringAdvancedSnippet build() { + return new FilteringAdvancedSnippet(advancedSnippetCreatedAt, advancedSnippetUpdatedAt, advancedSnippetValue); + } + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringPolicy.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringPolicy.java new file mode 100644 index 0000000000000..48170cfc8fae4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringPolicy.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import java.util.Locale; + +public enum FilteringPolicy { + EXCLUDE, + INCLUDE; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static FilteringPolicy filteringPolicy(String policy) { + for (FilteringPolicy filteringPolicy : FilteringPolicy.values()) { + if (filteringPolicy.name().equalsIgnoreCase(policy)) { + return filteringPolicy; + } + } + throw new IllegalArgumentException("Unknown FilteringPolicy: " + policy); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java new file mode 100644 index 0000000000000..cfcc639b8b613 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRule.java @@ -0,0 +1,237 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents a single rule used for filtering in a data processing or querying context. + * Each {@link FilteringRule} includes details such as its creation and update timestamps, + * the specific field it applies to, an identifier, and its order in a set of rules. + * Additionally, it encapsulates the filtering policy, the condition under which the rule applies, + * and the value associated with the rule. + */ +public class FilteringRule implements Writeable, ToXContentObject { + + private final Instant createdAt; + private final String field; + private final String id; + private final Integer order; + private final FilteringPolicy policy; + private final FilteringRuleCondition rule; + private final Instant updatedAt; + private final String value; + + /** + * Constructs a new FilteringRule instance. + * + * @param createdAt The creation timestamp of the filtering rule. + * @param field The field associated with the filtering rule. + * @param id The identifier of the filtering rule. + * @param order The order of the filtering rule. + * @param policy The {@link FilteringPolicy} of the filtering rule. + * @param rule The specific {@link FilteringRuleCondition} + * @param updatedAt The update timestamp of the filtering rule. 
+ * @param value The value associated with the filtering rule. + */ + public FilteringRule( + Instant createdAt, + String field, + String id, + Integer order, + FilteringPolicy policy, + FilteringRuleCondition rule, + Instant updatedAt, + String value + ) { + this.createdAt = createdAt; + this.field = field; + this.id = id; + this.order = order; + this.policy = policy; + this.rule = rule; + this.updatedAt = updatedAt; + this.value = value; + } + + public FilteringRule(StreamInput in) throws IOException { + this.createdAt = in.readInstant(); + this.field = in.readString(); + this.id = in.readString(); + this.order = in.readInt(); + this.policy = in.readEnum(FilteringPolicy.class); + this.rule = in.readEnum(FilteringRuleCondition.class); + this.updatedAt = in.readInstant(); + this.value = in.readString(); + } + + private static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); + private static final ParseField FIELD_FIELD = new ParseField("field"); + private static final ParseField ID_FIELD = new ParseField("id"); + private static final ParseField ORDER_FIELD = new ParseField("order"); + private static final ParseField POLICY_FIELD = new ParseField("policy"); + private static final ParseField RULE_FIELD = new ParseField("rule"); + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_filtering_rule", + true, + args -> new Builder().setCreatedAt((Instant) args[0]) + .setField((String) args[1]) + .setId((String) args[2]) + .setOrder((Integer) args[3]) + .setPolicy((FilteringPolicy) args[4]) + .setRule((FilteringRuleCondition) args[5]) + .setUpdatedAt((Instant) args[6]) + .setValue((String) args[7]) + .build() + ); + + static { + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + 
PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareString(constructorArg(), ID_FIELD); + PARSER.declareInt(constructorArg(), ORDER_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> FilteringPolicy.filteringPolicy(p.text()), + POLICY_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField( + constructorArg(), + (p, c) -> FilteringRuleCondition.filteringRuleCondition(p.text()), + RULE_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), UPDATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareString(constructorArg(), VALUE_FIELD); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); + builder.field(FIELD_FIELD.getPreferredName(), field); + builder.field(ID_FIELD.getPreferredName(), id); + builder.field(ORDER_FIELD.getPreferredName(), order); + builder.field(POLICY_FIELD.getPreferredName(), policy.toString()); + builder.field(RULE_FIELD.getPreferredName(), rule.toString()); + builder.field(UPDATED_AT_FIELD.getPreferredName(), updatedAt); + builder.field(VALUE_FIELD.getPreferredName(), value); + builder.endObject(); + return builder; + } + + public static FilteringRule fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeInstant(createdAt); + out.writeString(field); + out.writeString(id); + out.writeInt(order); + out.writeEnum(policy); + out.writeEnum(rule); + out.writeInstant(updatedAt); + out.writeString(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilteringRule that = (FilteringRule) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(field, 
that.field) + && Objects.equals(id, that.id) + && Objects.equals(order, that.order) + && policy == that.policy + && rule == that.rule + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(createdAt, field, id, order, policy, rule, updatedAt, value); + } + + public static class Builder { + + private Instant createdAt; + private String field; + private String id; + private Integer order; + private FilteringPolicy policy; + private FilteringRuleCondition rule; + private Instant updatedAt; + private String value; + + public Builder setCreatedAt(Instant createdAt) { + this.createdAt = createdAt; + return this; + } + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setId(String id) { + this.id = id; + return this; + } + + public Builder setOrder(Integer order) { + this.order = order; + return this; + } + + public Builder setPolicy(FilteringPolicy policy) { + this.policy = policy; + return this; + } + + public Builder setRule(FilteringRuleCondition rule) { + this.rule = rule; + return this; + } + + public Builder setUpdatedAt(Instant updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Builder setValue(String value) { + this.value = value; + return this; + } + + public FilteringRule build() { + return new FilteringRule(createdAt, field, id, order, policy, rule, updatedAt, value); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRuleCondition.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRuleCondition.java new file mode 100644 index 0000000000000..967107961b0d4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRuleCondition.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.filtering; + +public enum FilteringRuleCondition { + CONTAINS("contains"), + ENDS_WITH("ends_with"), + EQUALS("equals"), + GT(">"), + LT("<"), + REGEX("regex"), + STARTS_WITH("starts_with"); + + private final String value; + + FilteringRuleCondition(String value) { + this.value = value; + } + + @Override + public String toString() { + return this.value; + } + + public static FilteringRuleCondition filteringRuleCondition(String condition) { + for (FilteringRuleCondition filteringRuleCondition : FilteringRuleCondition.values()) { + if (filteringRuleCondition.value.equals(condition)) { + return filteringRuleCondition; + } + } + throw new IllegalArgumentException("Unknown FilteringRuleCondition: " + condition); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java new file mode 100644 index 0000000000000..dc96006f40349 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringRules.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * The {@link FilteringRules} class encapsulates the rules and configurations for filtering operations in a connector. + * It includes an advanced snippet for complex filtering logic, a list of individual filtering rules, and validation + * information for these rules. + */ +public class FilteringRules implements Writeable, ToXContentObject { + + private final FilteringAdvancedSnippet advancedSnippet; + private final List rules; + + private final FilteringValidationInfo filteringValidationInfo; + + /** + * Constructs a new FilteringRules instance. + * + * @param advancedSnippet The {@link FilteringAdvancedSnippet} object. + * @param rules The list of {@link FilteringRule} objects + * @param filteringValidationInfo The {@link FilteringValidationInfo} object. 
+ */ + public FilteringRules( + FilteringAdvancedSnippet advancedSnippet, + List rules, + FilteringValidationInfo filteringValidationInfo + ) { + this.advancedSnippet = advancedSnippet; + this.rules = rules; + this.filteringValidationInfo = filteringValidationInfo; + } + + public FilteringRules(StreamInput in) throws IOException { + this.advancedSnippet = new FilteringAdvancedSnippet(in); + this.rules = in.readCollectionAsList(FilteringRule::new); + this.filteringValidationInfo = new FilteringValidationInfo(in); + } + + private static final ParseField ADVANCED_SNIPPET_FIELD = new ParseField("advanced_snippet"); + private static final ParseField RULES_FIELD = new ParseField("rules"); + private static final ParseField VALIDATION_FIELD = new ParseField("validation"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_filtering_rules", + true, + args -> new Builder().setAdvancedSnippet((FilteringAdvancedSnippet) args[0]) + .setRules((List) args[1]) + .setFilteringValidationInfo((FilteringValidationInfo) args[2]) + .build() + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> FilteringAdvancedSnippet.fromXContent(p), ADVANCED_SNIPPET_FIELD); + PARSER.declareObjectArray(constructorArg(), (p, c) -> FilteringRule.fromXContent(p), RULES_FIELD); + PARSER.declareObject(constructorArg(), (p, c) -> FilteringValidationInfo.fromXContent(p), VALIDATION_FIELD); + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ADVANCED_SNIPPET_FIELD.getPreferredName(), advancedSnippet); + builder.xContentList(RULES_FIELD.getPreferredName(), rules); + builder.field(VALIDATION_FIELD.getPreferredName(), filteringValidationInfo); + } + builder.endObject(); + return builder; + } + + public static FilteringRules fromXContent(XContentParser parser) throws IOException { + return 
PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + advancedSnippet.writeTo(out); + out.writeCollection(rules); + filteringValidationInfo.writeTo(out); + + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilteringRules that = (FilteringRules) o; + return Objects.equals(advancedSnippet, that.advancedSnippet) + && Objects.equals(rules, that.rules) + && Objects.equals(filteringValidationInfo, that.filteringValidationInfo); + } + + @Override + public int hashCode() { + return Objects.hash(advancedSnippet, rules, filteringValidationInfo); + } + + public static class Builder { + + private FilteringAdvancedSnippet advancedSnippet; + private List rules; + private FilteringValidationInfo filteringValidationInfo; + + public Builder setAdvancedSnippet(FilteringAdvancedSnippet advancedSnippet) { + this.advancedSnippet = advancedSnippet; + return this; + } + + public Builder setRules(List rules) { + this.rules = rules; + return this; + } + + public Builder setFilteringValidationInfo(FilteringValidationInfo filteringValidationInfo) { + this.filteringValidationInfo = filteringValidationInfo; + return this; + } + + public FilteringRules build() { + return new FilteringRules(advancedSnippet, rules, filteringValidationInfo); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java new file mode 100644 index 0000000000000..bb2de688f6705 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidation.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents the details of a validation process, including identifiers and descriptive messages. + * This class is used to encapsulate information about specific validation checks, where each validation + * is associated with a list of IDs and corresponding messages that detail the validation results. + */ +public class FilteringValidation implements Writeable, ToXContentObject { + private final List ids; + private final List messages; + + /** + * Constructs a new FilteringValidation instance. + * + * @param ids The list of identifiers associated with the validation. + * @param messages The list of messages describing the validation results. 
+ */ + public FilteringValidation(List ids, List messages) { + this.ids = ids; + this.messages = messages; + } + + public FilteringValidation(StreamInput in) throws IOException { + this.ids = in.readStringCollectionAsList(); + this.messages = in.readStringCollectionAsList(); + } + + private static final ParseField IDS_FIELD = new ParseField("ids"); + private static final ParseField MESSAGES_FIELD = new ParseField("messages"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_filtering_validation", + true, + args -> new Builder().setIds((List) args[0]).setMessages((List) args[1]).build() + ); + + static { + PARSER.declareStringArray(constructorArg(), IDS_FIELD); + PARSER.declareStringArray(constructorArg(), MESSAGES_FIELD); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.stringListField(IDS_FIELD.getPreferredName(), ids); + builder.stringListField(MESSAGES_FIELD.getPreferredName(), messages); + } + return builder; + } + + public static FilteringValidation fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(ids); + out.writeStringCollection(messages); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilteringValidation that = (FilteringValidation) o; + return Objects.equals(ids, that.ids) && Objects.equals(messages, that.messages); + } + + @Override + public int hashCode() { + return Objects.hash(ids, messages); + } + + public static class Builder { + + private List ids; + private List messages; + + public Builder setIds(List ids) { + this.ids = ids; + return this; + } + + public Builder setMessages(List messages) { + this.messages = 
messages; + return this; + } + + public FilteringValidation build() { + return new FilteringValidation(ids, messages); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java new file mode 100644 index 0000000000000..c0cd80d867592 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationInfo.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Encapsulates validation information for filtering rules, including any errors encountered + * during validation and the overall state of validation. + * + * This class holds a list of validation errors, each represented by a {@link FilteringValidation} object, + * and the validation state, indicated by a {@link FilteringValidationState}. 
+ */ +public class FilteringValidationInfo implements Writeable, ToXContentObject { + + private final List validationErrors; + private final FilteringValidationState validationState; + + /** + * @param validationErrors The list of {@link FilteringValidation} errors for the filtering rules. + * @param validationState The {@link FilteringValidationState} of the filtering rules. + */ + public FilteringValidationInfo(List validationErrors, FilteringValidationState validationState) { + this.validationErrors = validationErrors; + this.validationState = validationState; + } + + public FilteringValidationInfo(StreamInput in) throws IOException { + this.validationErrors = in.readCollectionAsList(FilteringValidation::new); + this.validationState = in.readEnum(FilteringValidationState.class); + } + + private static final ParseField ERRORS_FIELD = new ParseField("errors"); + private static final ParseField STATE_FIELD = new ParseField("state"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "filtering_validation_info", + true, + args -> new Builder().setValidationErrors((List) args[0]) + .setValidationState((FilteringValidationState) args[1]) + .build() + ); + + static { + PARSER.declareObjectArray(constructorArg(), (p, c) -> FilteringValidation.fromXContent(p), ERRORS_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> FilteringValidationState.filteringValidationState(p.text()), + STATE_FIELD, + ObjectParser.ValueType.STRING + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ERRORS_FIELD.getPreferredName(), validationErrors); + builder.field(STATE_FIELD.getPreferredName(), validationState); + } + builder.endObject(); + return builder; + } + + public static FilteringValidationInfo fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } 
+ + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(validationErrors); + out.writeEnum(validationState); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FilteringValidationInfo that = (FilteringValidationInfo) o; + return Objects.equals(validationErrors, that.validationErrors) && validationState == that.validationState; + } + + @Override + public int hashCode() { + return Objects.hash(validationErrors, validationState); + } + + public static class Builder { + + private List validationErrors; + private FilteringValidationState validationState; + + public Builder setValidationErrors(List validationErrors) { + this.validationErrors = validationErrors; + return this; + } + + public Builder setValidationState(FilteringValidationState validationState) { + this.validationState = validationState; + return this; + } + + public FilteringValidationInfo build() { + return new FilteringValidationInfo(validationErrors, validationState); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationState.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationState.java new file mode 100644 index 0000000000000..e2d370e3b9ed8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/filtering/FilteringValidationState.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.filtering; + +import java.util.Locale; + +public enum FilteringValidationState { + EDITED, + INVALID, + VALID; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static FilteringValidationState filteringValidationState(String validationState) { + for (FilteringValidationState filteringValidationState : FilteringValidationState.values()) { + if (filteringValidationState.name().equalsIgnoreCase(validationState)) { + return filteringValidationState; + } + } + throw new IllegalArgumentException("Unknown FilteringValidationState: " + validationState); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorCustomScheduleTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorCustomScheduleTests.java index 2509405874869..9a1125410f493 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorCustomScheduleTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorCustomScheduleTests.java @@ -7,16 +7,24 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.List; import static java.util.Collections.emptyList; +import static 
org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; public class ConnectorCustomScheduleTests extends ESTestCase { @@ -37,6 +45,40 @@ public final void testRandomSerialization() throws IOException { } } + public void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "configuration_overrides": { + "domain_allowlist": [ + "https://example.com" + ], + "max_crawl_depth": 1, + "seed_urls": [ + "https://example.com/blog", + "https://example.com/info" + ], + "sitemap_discovery_disabled": true, + "sitemap_urls": [ + "https://example.com/sitemap.xml" + ] + }, + "enabled": true, + "interval": "0 0 12 * * ?", + "last_synced": null, + "name": "My Schedule" + } + """); + + ConnectorCustomSchedule customSchedule = ConnectorCustomSchedule.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(customSchedule, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorCustomSchedule parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorCustomSchedule.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorCustomSchedule testInstance) throws IOException { ConnectorCustomSchedule deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFeaturesTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFeaturesTests.java index f620586457099..1563ff5fcf82c 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFeaturesTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFeaturesTests.java @@ -7,16 +7,24 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; public class ConnectorFeaturesTests extends ESTestCase { @@ -38,6 +46,81 @@ public final void testRandomSerialization() throws IOException { } } + public void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "document_level_security": { + "enabled": true + }, + "filtering_advanced_config": true, + "sync_rules": { + "advanced": { + "enabled": false + }, + "basic": { + "enabled": true + } + } + } + """); + + testToXContentChecker(content); + } + + public void testToXContentMissingDocumentLevelSecurity() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "filtering_advanced_config": true, + "sync_rules": { + "advanced": { + "enabled": false + }, + "basic": { + "enabled": true + } + } + } + """); + + testToXContentChecker(content); + } + + 
public void testToXContentMissingSyncRules() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "filtering_advanced_config": true + } + """); + + testToXContentChecker(content); + } + + public void testToXContentMissingSyncRulesAdvanced() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "filtering_advanced_config": true, + "sync_rules": { + "basic": { + "enabled": true + } + } + } + """); + + testToXContentChecker(content); + } + + private void testToXContentChecker(String content) throws IOException { + ConnectorFeatures features = ConnectorFeatures.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(features, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorFeatures parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorFeatures.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorFeatures testInstance) throws IOException { ConnectorFeatures deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java index c3e6cc2d8b7ad..e65236e90d928 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorFilteringTests.java @@ -7,16 +7,24 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesArray; 
+import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; public class ConnectorFilteringTests extends ESTestCase { @@ -38,6 +46,70 @@ public final void testRandomSerialization() throws IOException { } } + public void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "active": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + """); + + ConnectorFiltering filtering = 
ConnectorFiltering.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(filtering, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorFiltering parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorFiltering.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + + } + private void assertTransportSerialization(ConnectorFiltering testInstance) throws IOException { ConnectorFiltering deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 6e1c42dab130f..71076693c07f8 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -7,12 +7,16 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -34,11 +38,51 @@ public void setup() { public void testPutConnector() throws Exception { Connector connector = 
ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + } + + public void testDeleteConnector() throws Exception { + int numConnectors = 5; + List connectorIds = new ArrayList<>(); + for (int i = 0; i < numConnectors; i++) { + Connector connector = ConnectorTestUtils.getRandomConnector(); + connectorIds.add(connector.getConnectorId()); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), equalTo(RestStatus.CREATED)); + } - // TODO: more checks once GET endpoint is implemented :) + String connectorIdToDelete = connectorIds.get(0); + DeleteResponse resp = awaitDeleteConnector(connectorIdToDelete); + assertThat(resp.status(), equalTo(RestStatus.OK)); + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnector(connectorIdToDelete)); + + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); + } + + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.deleteConnector(connectorId, new ActionListener<>() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + resp.set(deleteResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for delete request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from delete request", resp.get()); + return resp.get(); } private DocWriteResponse awaitPutConnector(Connector connector) throws Exception { @@ -66,4 +110,54 @@ public void onFailure(Exception e) { return resp.get(); } + private 
Connector awaitGetConnector(String connectorId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.getConnector(connectorId, new ActionListener<>() { + @Override + public void onResponse(Connector connector) { + resp.set(connector); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for get request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from get request", resp.get()); + return resp.get(); + } + + private ConnectorIndexService.ConnectorResult awaitListConnector(int from, int size) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.listConnectors(from, size, new ActionListener<>() { + @Override + public void onResponse(ConnectorIndexService.ConnectorResult result) { + resp.set(result); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for list request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from list request", resp.get()); + return resp.get(); + } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java index 9f9269d60dc70..f4a92e51e8c6a 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIngestPipelineTests.java @@ -7,16 +7,24 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; public class ConnectorIngestPipelineTests extends ESTestCase { @@ -38,6 +46,26 @@ public final void testRandomSerialization() throws IOException { } } + public void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + } + """); + + ConnectorIngestPipeline pipeline = ConnectorIngestPipeline.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(pipeline, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorIngestPipeline parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + 
parsed = ConnectorIngestPipeline.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorIngestPipeline testInstance) throws IOException { ConnectorIngestPipeline deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 59590b3d7e85e..52e9919924419 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -8,12 +8,20 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; +import org.elasticsearch.xpack.application.connector.filtering.FilteringAdvancedSnippet; +import org.elasticsearch.xpack.application.connector.filtering.FilteringPolicy; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRuleCondition; +import org.elasticsearch.xpack.application.connector.filtering.FilteringRules; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationInfo; +import org.elasticsearch.xpack.application.connector.filtering.FilteringValidationState; import org.elasticsearch.xpack.core.scheduler.Cron; import java.time.Instant; import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; @@ -22,6 
+30,7 @@ import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomList; import static org.elasticsearch.test.ESTestCase.randomLong; +import static org.elasticsearch.test.ESTestCase.randomLongBetween; public final class ConnectorTestUtils { public static PutConnectorAction.Request getRandomPutConnectorActionRequest() { @@ -73,19 +82,18 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { } public static ConnectorFeatures getRandomConnectorFeatures() { - return new ConnectorFeatures.Builder().setDocumentLevelSecurityEnabled(randomFrom(new Boolean[] { null, randomBoolean() })) + return new ConnectorFeatures.Builder().setDocumentLevelSecurityEnabled(randomBoolean() ? randomConnectorFeatureEnabled() : null) .setFilteringRules(randomFrom(new Boolean[] { null, randomBoolean() })) .setFilteringAdvancedConfig(randomFrom(new Boolean[] { null, randomBoolean() })) - .setIncrementalSyncEnabled(randomFrom(new Boolean[] { null, randomBoolean() })) - .setSyncRulesAdvancedEnabled(randomFrom(new Boolean[] { null, randomBoolean() })) - .setSyncRulesBasicEnabled(randomFrom(new Boolean[] { null, randomBoolean() })) + .setIncrementalSyncEnabled(randomBoolean() ? randomConnectorFeatureEnabled() : null) + .setSyncRulesFeatures(randomBoolean() ? 
randomSyncRulesFeatures() : null) .build(); } public static ConnectorCustomSchedule getRandomConnectorCustomSchedule() { return new ConnectorCustomSchedule.Builder().setInterval(getRandomCronExpression()) .setEnabled(randomBoolean()) - .setLastSynced(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) + .setLastSynced(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLongBetween(0, 10000)) })) .setName(randomAlphaOfLength(10)) .setConfigurationOverrides( new ConnectorCustomSchedule.ConfigurationOverrides.Builder().setMaxCrawlDepth(randomInt()) @@ -103,12 +111,15 @@ public static ConnectorFiltering getRandomConnectorFiltering() { Instant currentTimestamp = Instant.now(); return new ConnectorFiltering.Builder().setActive( - new ConnectorFiltering.FilteringRules.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) - .setAdvancedSnippetUpdatedAt(currentTimestamp) - .setAdvancedSnippetValue(Collections.emptyMap()) + new FilteringRules.Builder().setAdvancedSnippet( + new FilteringAdvancedSnippet.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) + .setAdvancedSnippetUpdatedAt(currentTimestamp) + .setAdvancedSnippetValue(Collections.emptyMap()) + .build() + ) .setRules( List.of( - new ConnectorFiltering.FilteringRule.Builder().setCreatedAt(currentTimestamp) + new FilteringRule.Builder().setCreatedAt(currentTimestamp) .setField(randomAlphaOfLength(10)) .setId(randomAlphaOfLength(10)) .setOrder(randomInt()) @@ -119,18 +130,24 @@ public static ConnectorFiltering getRandomConnectorFiltering() { .build() ) ) - .setValidationErrors(Collections.emptyList()) - .setValidationState(getRandomFilteringValidationState()) + .setFilteringValidationInfo( + new FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) + .setValidationState(getRandomFilteringValidationState()) + .build() + ) .build() ) .setDomain(randomAlphaOfLength(10)) .setDraft( - new 
ConnectorFiltering.FilteringRules.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) - .setAdvancedSnippetUpdatedAt(currentTimestamp) - .setAdvancedSnippetValue(Collections.emptyMap()) + new FilteringRules.Builder().setAdvancedSnippet( + new FilteringAdvancedSnippet.Builder().setAdvancedSnippetCreatedAt(currentTimestamp) + .setAdvancedSnippetUpdatedAt(currentTimestamp) + .setAdvancedSnippetValue(Collections.emptyMap()) + .build() + ) .setRules( List.of( - new ConnectorFiltering.FilteringRule.Builder().setCreatedAt(currentTimestamp) + new FilteringRule.Builder().setCreatedAt(currentTimestamp) .setField(randomAlphaOfLength(10)) .setId(randomAlphaOfLength(10)) .setOrder(randomInt()) @@ -141,8 +158,11 @@ public static ConnectorFiltering getRandomConnectorFiltering() { .build() ) ) - .setValidationErrors(Collections.emptyList()) - .setValidationState(getRandomFilteringValidationState()) + .setFilteringValidationInfo( + new FilteringValidationInfo.Builder().setValidationErrors(Collections.emptyList()) + .setValidationState(getRandomFilteringValidationState()) + .build() + ) .build() ) .build(); @@ -152,7 +172,7 @@ public static Connector getRandomConnector() { return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setConfiguration(Collections.emptyMap()) - .setCustomScheduling(randomBoolean() ? getRandomConnectorCustomSchedule() : null) + .setCustomScheduling(Map.of(randomAlphaOfLengthBetween(5, 10), getRandomConnectorCustomSchedule())) .setDescription(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setFeatures(randomBoolean() ? getRandomConnectorFeatures() : null) @@ -165,11 +185,21 @@ public static Connector getRandomConnector() { .setPipeline(randomBoolean() ? getRandomConnectorIngestPipeline() : null) .setScheduling(randomBoolean() ? 
getRandomConnectorScheduling() : null) .setStatus(getRandomConnectorStatus()) - .setSyncCursor(randomFrom(new Object[] { null, randomAlphaOfLength(1) })) + .setSyncCursor(randomBoolean() ? Map.of("foo", "bar") : null) .setSyncNow(randomBoolean()) .build(); } + private static ConnectorFeatures.FeatureEnabled randomConnectorFeatureEnabled() { + return new ConnectorFeatures.FeatureEnabled(randomBoolean()); + } + + private static ConnectorFeatures.SyncRulesFeatures randomSyncRulesFeatures() { + return new ConnectorFeatures.SyncRulesFeatures.Builder().setSyncRulesAdvancedEnabled( + randomBoolean() ? randomConnectorFeatureEnabled() : null + ).setSyncRulesBasicEnabled(randomBoolean() ? randomConnectorFeatureEnabled() : null).build(); + } + /** * Second (0 - 59) Minute (0 - 59) Hour (0 - 23) Day of month (1 - 31) Month (1 - 12) */ @@ -197,18 +227,18 @@ private static ConnectorStatus getRandomConnectorStatus() { return values[randomInt(values.length - 1)]; } - private static ConnectorFiltering.FilteringPolicy getRandomFilteringPolicy() { - ConnectorFiltering.FilteringPolicy[] values = ConnectorFiltering.FilteringPolicy.values(); + private static FilteringPolicy getRandomFilteringPolicy() { + FilteringPolicy[] values = FilteringPolicy.values(); return values[randomInt(values.length - 1)]; } - private static ConnectorFiltering.FilteringRuleCondition getRandomFilteringRule() { - ConnectorFiltering.FilteringRuleCondition[] values = ConnectorFiltering.FilteringRuleCondition.values(); + private static FilteringRuleCondition getRandomFilteringRule() { + FilteringRuleCondition[] values = FilteringRuleCondition.values(); return values[randomInt(values.length - 1)]; } - private static ConnectorFiltering.FilteringValidationState getRandomFilteringValidationState() { - ConnectorFiltering.FilteringValidationState[] values = ConnectorFiltering.FilteringValidationState.values(); + private static FilteringValidationState getRandomFilteringValidationState() { + FilteringValidationState[] 
values = FilteringValidationState.values(); return values[randomInt(values.length - 1)]; } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 7d200ff9bdd94..a83537f32f413 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -7,16 +7,24 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.junit.Before; import java.io.IOException; import java.util.List; import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; public class ConnectorTests extends ESTestCase { @@ -38,6 +46,150 @@ public final void testRandomSerialization() throws IOException { } } + public void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "api_key_id": "test", + "connector_id": "test-connector", + "custom_scheduling": { + "schedule-key": { + "configuration_overrides": { + "domain_allowlist": [ + "https://example.com" + ], + "max_crawl_depth": 1, + "seed_urls": [ + 
"https://example.com/blog", + "https://example.com/info" + ], + "sitemap_discovery_disabled": true, + "sitemap_urls": [ + "https://example.com/sitemap.xml" + ] + }, + "enabled": true, + "interval": "0 0 12 * * ?", + "last_synced": null, + "name": "My Schedule" + } + }, + "configuration": {}, + "description": "test-connector", + "features": { + "document_level_security": { + "enabled": true + }, + "filtering_advanced_config": true, + "sync_rules": { + "advanced": { + "enabled": false + }, + "basic": { + "enabled": true + } + } + }, + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-test", + "is_native": true, + "language": "polish", + "last_access_control_sync_error": "some error", + "last_access_control_sync_scheduled_at": "2023-11-09T15:13:08.231Z", + "last_access_control_sync_status": "pending", + "last_deleted_document_count": 42, + "last_incremental_sync_scheduled_at": "2023-11-09T15:13:08.231Z", + "last_indexed_document_count": 42, + "last_seen": "2023-11-09T15:13:08.231Z", + "last_sync_error": "some error", + "last_sync_scheduled_at": "2024-11-09T15:13:08.231Z", + "last_sync_status": "completed", 
+ "last_synced": "2024-11-09T15:13:08.231Z", + "name": "test-name", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "scheduling": { + "access_control": { + "enabled": false, + "interval": "0 0 0 * * ?" + }, + "full": { + "enabled": false, + "interval": "0 0 0 * * ?" + }, + "incremental": { + "enabled": false, + "interval": "0 0 0 * * ?" + } + }, + "service_type": "google_drive", + "status": "needs_configuration", + "sync_now": false + }"""); + + Connector connector = Connector.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(connector, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + Connector parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = Connector.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + private void assertTransportSerialization(Connector testInstance) throws IOException { Connector deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..7588db45f5f75 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/DeleteConnectorActionRequestBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class DeleteConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorAction.Request::new; + } + + @Override + protected DeleteConnectorAction.Request createTestInstance() { + return new DeleteConnectorAction.Request(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected DeleteConnectorAction.Request mutateInstance(DeleteConnectorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { + return DeleteConnectorAction.Request.parse(parser); + } + + @Override + protected DeleteConnectorAction.Request mutateInstanceForVersion(DeleteConnectorAction.Request instance, TransportVersion version) { + return new DeleteConnectorAction.Request(instance.getConnectorId()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..124a068abce93 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionRequestBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class GetConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorAction.Request::new; + } + + @Override + protected GetConnectorAction.Request createTestInstance() { + return new GetConnectorAction.Request(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected GetConnectorAction.Request mutateInstance(GetConnectorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { + return GetConnectorAction.Request.parse(parser); + } + + @Override + protected GetConnectorAction.Request mutateInstanceForVersion(GetConnectorAction.Request instance, TransportVersion version) { + return new GetConnectorAction.Request(instance.getConnectorId()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..bcb1bcc86402f --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class GetConnectorActionResponseBWCSerializingTests extends AbstractBWCSerializationTestCase { + + private Connector connector; + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorAction.Response::new; + } + + @Override + protected GetConnectorAction.Response createTestInstance() { + this.connector = ConnectorTestUtils.getRandomConnector(); + return new GetConnectorAction.Response(this.connector); + } + + @Override + protected GetConnectorAction.Response mutateInstance(GetConnectorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorAction.Response doParseInstance(XContentParser parser) throws IOException { + return GetConnectorAction.Response.fromXContent(parser); + } + + @Override + protected GetConnectorAction.Response mutateInstanceForVersion(GetConnectorAction.Response instance, TransportVersion version) { + return instance; + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..b31c3e90b7403 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class ListConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorAction.Request::new; + } + + @Override + protected ListConnectorAction.Request createTestInstance() { + PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + return new ListConnectorAction.Request(pageParams); + } + + @Override + protected ListConnectorAction.Request mutateInstance(ListConnectorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { + return 
ListConnectorAction.Request.parse(parser); + } + + @Override + protected ListConnectorAction.Request mutateInstanceForVersion(ListConnectorAction.Request instance, TransportVersion version) { + return new ListConnectorAction.Request(instance.getPageParams()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..1e4ee0d086462 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionResponseBWCSerializingTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class ListConnectorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorAction.Response::new; + } + + @Override + protected ListConnectorAction.Response createTestInstance() { + return new ListConnectorAction.Response(randomList(10, ConnectorTestUtils::getRandomConnector), randomLongBetween(0, 100)); + } + + @Override + protected ListConnectorAction.Response mutateInstance(ListConnectorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorAction.Response mutateInstanceForVersion(ListConnectorAction.Response instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5c70b64aa7b3a..99d3c7e78a478 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -123,6 +123,9 @@ public class Constants { "cluster:admin/xpack/ccr/auto_follow_pattern/put", "cluster:admin/xpack/ccr/pause_follow", "cluster:admin/xpack/ccr/resume_follow", + "cluster:admin/xpack/connector/delete", + 
"cluster:admin/xpack/connector/get", + "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From 27c7e1fd21b97a9d860ad12be05ebe07149d4158 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 30 Nov 2023 09:37:14 +0100 Subject: [PATCH 093/263] Add simple stress test to EnrichMultiNodeIT (#102751) Add a simple test that forces enrich tasks to queue up to guard against bugs around holding on to multi search respones in this logic, now that we are introducing ref counting to them. relates #102479 --- .../xpack/enrich/EnrichMultiNodeIT.java | 62 +++++++++++++++---- 1 file changed, 51 insertions(+), 11 deletions(-) diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java index 5416741c8743d..b81a5e6b902b3 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Strings; import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; @@ -53,6 +54,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static 
org.hamcrest.Matchers.is; @@ -82,6 +84,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { // TODO Change this to run with security enabled // https://github.com/elastic/elasticsearch/issues/75940 .put(XPackSettings.SECURITY_ENABLED.getKey(), false) + .put("thread_pool.write.size", 2) .build(); } @@ -135,6 +138,25 @@ public void testEnrich() { enrich(keys, randomFrom(nodes)); } + public void testStressEnrich() { + List nodes = internalCluster().startNodes( + 3, + Settings.builder().put("enrich.coordinator_proxy.max_concurrent_requests", 1).build() + ); + int indices = randomIntBetween(5, 10); + final Map> keys = Maps.newHashMapWithExpectedSize(indices); + for (int i = 0; i < indices; i++) { + final String indexName = "index-" + i; + List k = createSourceIndex(indexName, 64); + final String policyName = "policy-" + i; + createAndExecutePolicy(policyName, indexName); + final String pipelineName = "pipeline-" + i; + createPipeline(policyName, pipelineName); + keys.put(pipelineName, k); + } + enrich(keys, randomFrom(nodes), 50); + } + public void testEnrichDedicatedIngestNode() { internalCluster().startNode(); String ingestOnlyNode = internalCluster().startNode(ingestOnlyNode()); @@ -210,13 +232,19 @@ public void testExecutePolicyNeverOnElectedMaster() throws Exception { } private static void enrich(List keys, String coordinatingNode) { - int numDocs = 256; + enrich(Map.of(PIPELINE_NAME, keys), coordinatingNode, 256); + } + + private static void enrich(Map> keys, String coordinatingNode, int numDocs) { + final String[] executedPipeline = new String[2 * numDocs]; BulkRequest bulkRequest = new BulkRequest("my-index"); for (int i = 0; i < numDocs; i++) { + final String pipeline = randomFrom(keys.keySet()); + executedPipeline[i] = pipeline; IndexRequest indexRequest = new IndexRequest(); indexRequest.id(Integer.toString(i)); - indexRequest.setPipeline(PIPELINE_NAME); - indexRequest.source(Map.of(MATCH_FIELD, randomFrom(keys))); + 
indexRequest.setPipeline(pipeline); + indexRequest.source(Map.of(MATCH_FIELD, randomFrom(keys.get(pipeline)))); bulkRequest.add(indexRequest); } BulkResponse bulkResponse = client(coordinatingNode).bulk(bulkRequest).actionGet(); @@ -231,7 +259,7 @@ private static void enrich(List keys, String coordinatingNode) { Map source = getResponse.getSourceAsMap(); Map userEntry = (Map) source.get("user"); assertThat(userEntry.size(), equalTo(DECORATE_FIELDS.length + 1)); - assertThat(keys.contains(userEntry.get(MATCH_FIELD)), is(true)); + assertThat(keys.get(executedPipeline[i]), containsInRelativeOrder(userEntry.get(MATCH_FIELD))); for (String field : DECORATE_FIELDS) { assertThat(userEntry.get(field), notNullValue()); } @@ -250,6 +278,10 @@ private static void enrich(List keys, String coordinatingNode) { } private static List createSourceIndex(int numDocs) { + return createSourceIndex(SOURCE_INDEX_NAME, numDocs); + } + + private static List createSourceIndex(String indexName, int numDocs) { Set keys = new HashSet<>(); for (int i = 0; i < numDocs; i++) { String key; @@ -257,7 +289,7 @@ private static List createSourceIndex(int numDocs) { key = randomAlphaOfLength(16); } while (keys.add(key) == false); - IndexRequest indexRequest = new IndexRequest(SOURCE_INDEX_NAME); + IndexRequest indexRequest = new IndexRequest(indexName); indexRequest.create(true); indexRequest.id(key); indexRequest.source( @@ -274,23 +306,27 @@ private static List createSourceIndex(int numDocs) { ); client().index(indexRequest).actionGet(); } - indicesAdmin().refresh(new RefreshRequest(SOURCE_INDEX_NAME)).actionGet(); + indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); return List.copyOf(keys); } private static void createAndExecutePolicy() { + createAndExecutePolicy(POLICY_NAME, SOURCE_INDEX_NAME); + } + + private static void createAndExecutePolicy(String policyName, String indexName) { EnrichPolicy enrichPolicy = new EnrichPolicy( EnrichPolicy.MATCH_TYPE, null, - 
List.of(SOURCE_INDEX_NAME), + List.of(indexName), MATCH_FIELD, List.of(DECORATE_FIELDS) ); - PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(POLICY_NAME, enrichPolicy); + PutEnrichPolicyAction.Request request = new PutEnrichPolicyAction.Request(policyName, enrichPolicy); client().execute(PutEnrichPolicyAction.INSTANCE, request).actionGet(); final ActionFuture policyExecuteFuture = client().execute( ExecuteEnrichPolicyAction.INSTANCE, - new ExecuteEnrichPolicyAction.Request(POLICY_NAME) + new ExecuteEnrichPolicyAction.Request(policyName) ); // Make sure we can deserialize enrich policy execution task status final List tasks = clusterAdmin().prepareListTasks().setActions(EnrichPolicyExecutor.TASK_ACTION).get().getTasks(); @@ -307,11 +343,15 @@ private static void createAndExecutePolicy() { } private static void createPipeline() { + createPipeline(POLICY_NAME, PIPELINE_NAME); + } + + private static void createPipeline(String policyName, String pipelineName) { String pipelineBody = Strings.format(""" { "processors": [ { "enrich": { "policy_name": "%s", "field": "%s", "target_field": "user" } } ] - }""", POLICY_NAME, MATCH_FIELD); - PutPipelineRequest request = new PutPipelineRequest(PIPELINE_NAME, new BytesArray(pipelineBody), XContentType.JSON); + }""", policyName, MATCH_FIELD); + PutPipelineRequest request = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); clusterAdmin().putPipeline(request).actionGet(); } } From 75c5b870b4eb68fe0049a2e5f5395c938d031c11 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 09:13:46 +0000 Subject: [PATCH 094/263] Use a real UPGRADE_TO_LUCENE_9_9 TransportVersion id --- server/src/main/java/org/elasticsearch/TransportVersions.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cdae87a0b9bd1..84445e9e0bb43 
100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -181,9 +181,7 @@ static TransportVersion def(int id) { public static final TransportVersion GET_API_KEY_INVALIDATION_TIME_ADDED = def(8_548_00_0); public static final TransportVersion ML_INFERENCE_GET_MULTIPLE_MODELS = def(8_549_00_0); public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); - - // Placeholder for features that require the next lucene version. Its id needs to be adjusted when merging lucene_snapshot into main. - public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_900_00_0); + public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_551_00_0); /* * STOP! READ THIS FIRST! No, really, From 12906cd8f33dae7c8d1169b3aa6d55f3f4bfe065 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 09:21:53 +0000 Subject: [PATCH 095/263] Fix IndexVersion to use Lucene 9.9 --- .../src/main/java/org/elasticsearch/index/IndexVersions.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7c99764e44283..eb3a7dd075f9f 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -89,9 +89,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion NEW_SPARSE_VECTOR = def(8_500_001, Version.LUCENE_9_7_0); public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); - public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); - - public static final IndexVersion UPGRADE_TO_LUCENE_9_9 = def(8_500_010, 
Version.LUCENE_9_9_0); + public static final IndexVersion UPGRADE_LUCENE_9_9 = def(8_500_004, Version.LUCENE_9_9_0); /* * STOP! READ THIS FIRST! No, really, From 4cd97d534a6992ce3213ec2665e4626531ed5736 Mon Sep 17 00:00:00 2001 From: Julia Bardi <90178898+juliaElastic@users.noreply.github.com> Date: Thu, 30 Nov 2023 10:34:07 +0100 Subject: [PATCH 096/263] Update KibanaOwnedReservedRoleDescriptors.java (#102758) --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index ac061d31d962e..6a1da2e0ddfa0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -197,6 +197,8 @@ static RoleDescriptor kibanaSystem(String name) { RoleDescriptor.IndicesPrivileges.builder().indices("logs-elastic_agent*").privileges("read").build(), // Fleet publishes Agent metrics in kibana task runner RoleDescriptor.IndicesPrivileges.builder().indices("metrics-fleet_server*").privileges("all").build(), + // Fleet reads output health from this index pattern + RoleDescriptor.IndicesPrivileges.builder().indices("logs-fleet_server*").privileges("read").build(), // Legacy "Alerts as data" used in Security Solution. // Kibana user creates these indices; reads / writes to them. 
RoleDescriptor.IndicesPrivileges.builder().indices(ReservedRolesStore.ALERTS_LEGACY_INDEX).privileges("all").build(), From e931376eb51be76ab30a4ffb7ab51cf67beab5cf Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 30 Nov 2023 11:22:19 +0100 Subject: [PATCH 097/263] [Enterprise Search] Add POST connector sync job (#102801) Add POST connector sync job endpoint. --- .../api/connector_sync_job.post.json | 33 ++ .../org/elasticsearch/test/ESTestCase.java | 11 + .../entsearch/400_connector_sync_job_post.yml | 74 +++ .../ent-search/src/main/java/module-info.java | 2 + .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 50 +- .../connector/ConnectorSyncStatus.java | 10 + .../connector/syncjob/ConnectorSyncJob.java | 485 ++++++++++++++++++ .../syncjob/ConnectorSyncJobIndexService.java | 159 ++++++ .../ConnectorSyncJobTriggerMethod.java | 30 ++ .../syncjob/ConnectorSyncJobType.java | 31 ++ .../action/PostConnectorSyncJobAction.java | 206 ++++++++ .../RestPostConnectorSyncJobAction.java | 46 ++ .../TransportPostConnectorSyncJobAction.java | 52 ++ .../connector/ConnectorTestUtils.java | 12 +- .../ConnectorSyncJobIndexServiceTests.java | 220 ++++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 88 ++++ .../syncjob/ConnectorSyncJobTests.java | 47 ++ .../ConnectorSyncJobTriggerMethodTests.java | 26 + .../syncjob/ConnectorSyncJobTypeTests.java | 25 + ...ncJobActionRequestBWCSerializingTests.java | 48 ++ ...cJobActionResponseBWCSerializingTests.java | 43 ++ .../PostConnectorSyncJobActionTests.java | 41 ++ ...nsportPostConnectorSyncJobActionTests.java | 75 +++ .../xpack/security/operator/Constants.java | 1 + 25 files changed, 1810 insertions(+), 10 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethod.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobType.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethodTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTypeTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json new file mode 100644 index 0000000000000..563d0022d90d3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json @@ -0,0 +1,33 @@ +{ + "connector_sync_job.post": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Creates a connector sync job." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job", + "methods": [ + "POST" + ] + } + ] + }, + "body": { + "description": "The connector sync job data.", + "required": true + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4391629455933..a597142ae1ed0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -144,6 +144,7 @@ import java.security.NoSuchProviderException; import java.security.Provider; import java.security.SecureRandom; +import java.time.Instant; import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; @@ -785,6 +786,16 @@ public static long randomLongBetween(long min, long max) { return RandomNumbers.randomLongBetween(random(), min, max); } + /** + * @return a random instant between a min and a max value with a random nanosecond precision + */ + public static Instant randomInstantBetween(Instant minInstant, Instant maxInstant) { + return Instant.ofEpochSecond( + randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond()), + randomLongBetween(0, 999999999) + ); + } + /** * The maximum value that can be represented as an unsigned long. 
*/ diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml new file mode 100644 index 0000000000000..055221b917cb1 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml @@ -0,0 +1,74 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +'Create connector sync job': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - match: { id: $id } + +--- +'Create connector sync job with missing job type': + - do: + connector_sync_job.post: + body: + id: test-connector + trigger_method: on_demand + - set: { id: id } + - match: { id: $id } + +--- +'Create connector sync job with missing trigger method': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + - set: { id: id } + - match: { id: $id } + +--- +'Create connector sync job with non-existing connector id': + - do: + connector_sync_job.post: + body: + id: non-existing-id + job_type: full + trigger_method: on_demand + catch: missing + +--- +'Create connector sync job with invalid job type': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: invalid_job_type + trigger_method: on_demand + catch: bad_request + +--- +'Create connector sync job with invalid trigger method': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: invalid_trigger_method + catch: bad_request diff --git a/x-pack/plugin/ent-search/src/main/java/module-info.java 
b/x-pack/plugin/ent-search/src/main/java/module-info.java index c1fdc23f61ae9..d8cbceda4d8a3 100644 --- a/x-pack/plugin/ent-search/src/main/java/module-info.java +++ b/x-pack/plugin/ent-search/src/main/java/module-info.java @@ -35,6 +35,8 @@ exports org.elasticsearch.xpack.application.rules.action; exports org.elasticsearch.xpack.application.connector; exports org.elasticsearch.xpack.application.connector.action; + exports org.elasticsearch.xpack.application.connector.syncjob; + exports org.elasticsearch.xpack.application.connector.syncjob.action; provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.xpack.application.EnterpriseSearchFeatures; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index ce54474994acf..819c345392c65 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -54,6 +54,9 @@ import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; import org.elasticsearch.xpack.application.rules.RuleQueryBuilder; @@ -173,6 +176,7 @@ protected XPackLicenseState getLicenseState() 
{ new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class) ) ); + actionHandlers.add(new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class)); } return Collections.unmodifiableList(actionHandlers); @@ -227,6 +231,7 @@ public List getRestHandlers( new RestPutConnectorAction() ) ); + restHandlers.add(new RestPostConnectorSyncJobAction()); } return Collections.unmodifiableList(restHandlers); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index d0787e62113ec..bdee310612e18 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -9,9 +9,9 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -28,6 +28,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -60,7 +61,9 @@ *
  • A boolean flag 'syncNow', which, when set, triggers an immediate synchronization operation.
  • * */ -public class Connector implements Writeable, ToXContentObject { +public class Connector implements NamedWriteable, ToXContentObject { + + public static final String NAME = Connector.class.getName().toUpperCase(Locale.ROOT); private final String connectorId; @Nullable @@ -185,21 +188,21 @@ public Connector(StreamInput in) throws IOException { this.syncNow = in.readBoolean(); } - static final ParseField ID_FIELD = new ParseField("connector_id"); + public static final ParseField ID_FIELD = new ParseField("connector_id"); static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); - static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); + public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); static final ParseField DESCRIPTION_FIELD = new ParseField("description"); static final ParseField ERROR_FIELD = new ParseField("error"); static final ParseField FEATURES_FIELD = new ParseField("features"); - static final ParseField FILTERING_FIELD = new ParseField("filtering"); - static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); + public static final ParseField FILTERING_FIELD = new ParseField("filtering"); + public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); - static final ParseField LANGUAGE_FIELD = new ParseField("language"); + public static final ParseField LANGUAGE_FIELD = new ParseField("language"); static final ParseField NAME_FIELD = new ParseField("name"); - static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); + public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); - static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); + public static final ParseField 
SERVICE_TYPE_FIELD = new ParseField("service_type"); static final ParseField STATUS_FIELD = new ParseField("status"); static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); @@ -444,6 +447,30 @@ public String getConnectorId() { return connectorId; } + public List getFiltering() { + return filtering; + } + + public String getIndexName() { + return indexName; + } + + public String getLanguage() { + return language; + } + + public ConnectorIngestPipeline getPipeline() { + return pipeline; + } + + public String getServiceType() { + return serviceType; + } + + public Map getConfiguration() { + return configuration; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -495,6 +522,11 @@ public int hashCode() { ); } + @Override + public String getWriteableName() { + return NAME; + } + public static class Builder { private String connectorId; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java index b138fc864881a..30fca79f78876 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncStatus.java @@ -30,6 +30,16 @@ public enum ConnectorSyncStatus { PENDING, SUSPENDED; + public static ConnectorSyncStatus fromString(String syncStatusString) { + for (ConnectorSyncStatus syncStatus : ConnectorSyncStatus.values()) { + if (syncStatus.toString().equalsIgnoreCase(syncStatusString)) { + return syncStatus; + } + } + + throw new IllegalArgumentException("Unknown sync status '" + syncStatusString + "'."); + } + @Override public String toString() { return name().toLowerCase(Locale.ROOT); diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java new file mode 100644 index 0000000000000..6c0e9635d986d --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -0,0 +1,485 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +import java.io.IOException; +import java.time.Instant; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; + +/** + * Represents a sync job in the Elasticsearch ecosystem. Sync jobs refer to a unit of work, which syncs data from a 3rd party + * data source into an Elasticsearch index using the Connectors service. A ConnectorSyncJob always refers + * to a corresponding {@link Connector}. Each ConnectorSyncJob instance encapsulates various settings and state information, including: + *
      + *
    • A timestamp, when the sync job cancellation was requested.
    • + *
    • A timestamp, when the sync job was cancelled.
    • + *
    • A timestamp, when the sync job was completed.
    • + *
    • A subset of the {@link Connector} fields the sync job is referring to.
    • + *
    • A timestamp, when the sync job was created.
    • + *
    • The number of documents deleted by the sync job.
    • + *
    • An error, which might have appeared during the sync job execution.
    • + *
    • A unique identifier for distinguishing different connectors.
    • + *
    • The number of documents indexed by the sync job.
    • + *
    • The volume of the indexed documents.
    • + *
    • The {@link ConnectorSyncJobType} of the sync job.
    • + *
    • A timestamp, when the sync job was last seen by the Connectors service.
    • + *
    • A {@link Map} containing metadata of the sync job.
    • + *
    • A timestamp, when the sync job was started.
    • + *
    • The {@link ConnectorSyncStatus} of the connector.
    • + *
    • The total number of documents present in the index after the sync job completes.
    • + *
    • The {@link ConnectorSyncJobTriggerMethod} of the sync job.
    • + *
    • The hostname of the worker to run the sync job.
    • + *
    + */ +public class ConnectorSyncJob implements Writeable, ToXContentObject { + + static final ParseField CANCELATION_REQUESTED_AT_FIELD = new ParseField("cancelation_requested_at"); + + static final ParseField CANCELED_AT_FIELD = new ParseField("canceled_at"); + + static final ParseField COMPLETED_AT_FIELD = new ParseField("completed_at"); + + static final ParseField CONNECTOR_FIELD = new ParseField("connector"); + + static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); + + static final ParseField DELETED_DOCUMENT_COUNT = new ParseField("deleted_document_count"); + + static final ParseField ERROR_FIELD = new ParseField("error"); + + public static final ParseField ID_FIELD = new ParseField("id"); + + static final ParseField INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("indexed_document_count"); + + static final ParseField INDEXED_DOCUMENT_VOLUME_FIELD = new ParseField("indexed_document_volume"); + + public static final ParseField JOB_TYPE_FIELD = new ParseField("job_type"); + + static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); + + static final ParseField METADATA_FIELD = new ParseField("metadata"); + + static final ParseField STARTED_AT_FIELD = new ParseField("started_at"); + + static final ParseField STATUS_FIELD = new ParseField("status"); + + static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); + + public static final ParseField TRIGGER_METHOD_FIELD = new ParseField("trigger_method"); + + static final ParseField WORKER_HOSTNAME_FIELD = new ParseField("worker_hostname"); + + static final ConnectorSyncStatus DEFAULT_INITIAL_STATUS = ConnectorSyncStatus.PENDING; + + static final ConnectorSyncJobType DEFAULT_JOB_TYPE = ConnectorSyncJobType.FULL; + + static final ConnectorSyncJobTriggerMethod DEFAULT_TRIGGER_METHOD = ConnectorSyncJobTriggerMethod.ON_DEMAND; + + private final Instant cancelationRequestedAt; + + @Nullable + private final Instant canceledAt; + + @Nullable + private final 
Instant completedAt; + + private final Connector connector; + + private final Instant createdAt; + + private final long deletedDocumentCount; + + @Nullable + private final String error; + + private final String id; + + private final long indexedDocumentCount; + + private final long indexedDocumentVolume; + + private final ConnectorSyncJobType jobType; + + @Nullable + private final Instant lastSeen; + + private final Map metadata; + + @Nullable + private final Instant startedAt; + + private final ConnectorSyncStatus status; + + @Nullable + private final long totalDocumentCount; + + private final ConnectorSyncJobTriggerMethod triggerMethod; + + @Nullable + private final String workerHostname; + + /** + * + * @param cancelationRequestedAt Timestamp when the sync job cancellation was requested. + * @param canceledAt Timestamp, when the sync job was cancelled. + * @param completedAt Timestamp, when the sync job was completed. + * @param connector Subset of connector fields the sync job is referring to. + * @param createdAt Timestamp, when the sync job was created. + * @param deletedDocumentCount Number of documents deleted by the sync job. + * @param error Error, which might have appeared during the sync job execution. + * @param id Unique identifier for distinguishing different connectors. + * @param indexedDocumentCount Number of documents indexed by the sync job. + * @param indexedDocumentVolume Volume of the indexed documents. + * @param jobType Job type of the sync job. + * @param lastSeen Timestamp, when the sync was last seen by the Connectors service. + * @param metadata Map containing metadata of the sync job. + * @param startedAt Timestamp, when the sync job was started. + * @param status Sync status of the connector. + * @param totalDocumentCount Total number of documents present in the index after the sync job completes. + * @param triggerMethod Trigger method of the sync job. + * @param workerHostname Hostname of the worker to run the sync job. 
+ */ + private ConnectorSyncJob( + Instant cancelationRequestedAt, + Instant canceledAt, + Instant completedAt, + Connector connector, + Instant createdAt, + long deletedDocumentCount, + String error, + String id, + long indexedDocumentCount, + long indexedDocumentVolume, + ConnectorSyncJobType jobType, + Instant lastSeen, + Map metadata, + Instant startedAt, + ConnectorSyncStatus status, + long totalDocumentCount, + ConnectorSyncJobTriggerMethod triggerMethod, + String workerHostname + ) { + this.cancelationRequestedAt = cancelationRequestedAt; + this.canceledAt = canceledAt; + this.completedAt = completedAt; + this.connector = connector; + this.createdAt = createdAt; + this.deletedDocumentCount = deletedDocumentCount; + this.error = error; + this.id = Objects.requireNonNull(id, "[id] cannot be null"); + this.indexedDocumentCount = indexedDocumentCount; + this.indexedDocumentVolume = indexedDocumentVolume; + this.jobType = Objects.requireNonNullElse(jobType, ConnectorSyncJobType.FULL); + this.lastSeen = lastSeen; + this.metadata = Objects.requireNonNullElse(metadata, Collections.emptyMap()); + this.startedAt = startedAt; + this.status = status; + this.totalDocumentCount = totalDocumentCount; + this.triggerMethod = Objects.requireNonNullElse(triggerMethod, ConnectorSyncJobTriggerMethod.ON_DEMAND); + this.workerHostname = workerHostname; + } + + public ConnectorSyncJob(StreamInput in) throws IOException { + this.cancelationRequestedAt = in.readOptionalInstant(); + this.canceledAt = in.readOptionalInstant(); + this.completedAt = in.readOptionalInstant(); + this.connector = in.readNamedWriteable(Connector.class); + this.createdAt = in.readInstant(); + this.deletedDocumentCount = in.readLong(); + this.error = in.readOptionalString(); + this.id = in.readString(); + this.indexedDocumentCount = in.readLong(); + this.indexedDocumentVolume = in.readLong(); + this.jobType = in.readEnum(ConnectorSyncJobType.class); + this.lastSeen = in.readOptionalInstant(); + this.metadata = 
in.readMap(StreamInput::readString, StreamInput::readGenericValue); + this.startedAt = in.readOptionalInstant(); + this.status = in.readEnum(ConnectorSyncStatus.class); + this.totalDocumentCount = in.readOptionalLong(); + this.triggerMethod = in.readEnum(ConnectorSyncJobTriggerMethod.class); + this.workerHostname = in.readOptionalString(); + } + + public String getId() { + return id; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); + builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); + builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + + builder.startObject(CONNECTOR_FIELD.getPreferredName()); + { + builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); + builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); + builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); + builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); + builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); + } + builder.endObject(); + + builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); + builder.field(DELETED_DOCUMENT_COUNT.getPreferredName(), deletedDocumentCount); + builder.field(ERROR_FIELD.getPreferredName(), error); + builder.field(ID_FIELD.getPreferredName(), id); + builder.field(INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); + builder.field(INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); + builder.field(JOB_TYPE_FIELD.getPreferredName(), jobType); + 
builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + builder.field(METADATA_FIELD.getPreferredName(), metadata); + builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + builder.field(STATUS_FIELD.getPreferredName(), status); + builder.field(TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); + builder.field(TRIGGER_METHOD_FIELD.getPreferredName(), triggerMethod); + builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalInstant(cancelationRequestedAt); + out.writeOptionalInstant(canceledAt); + out.writeOptionalInstant(completedAt); + out.writeNamedWriteable(connector); + out.writeInstant(createdAt); + out.writeLong(deletedDocumentCount); + out.writeOptionalString(error); + out.writeString(id); + out.writeLong(indexedDocumentCount); + out.writeLong(indexedDocumentVolume); + out.writeEnum(jobType); + out.writeOptionalInstant(lastSeen); + out.writeMap(metadata, StreamOutput::writeString, StreamOutput::writeGenericValue); + out.writeOptionalInstant(startedAt); + out.writeEnum(status); + out.writeOptionalLong(totalDocumentCount); + out.writeEnum(triggerMethod); + out.writeOptionalString(workerHostname); + } + + public boolean equals(Object other) { + if (this == other) return true; + if (other == null || getClass() != other.getClass()) return false; + + ConnectorSyncJob connectorSyncJob = (ConnectorSyncJob) other; + + return Objects.equals(cancelationRequestedAt, connectorSyncJob.cancelationRequestedAt) + && Objects.equals(canceledAt, connectorSyncJob.canceledAt) + && Objects.equals(completedAt, connectorSyncJob.completedAt) + && Objects.equals(connector, connectorSyncJob.connector) + && Objects.equals(createdAt, connectorSyncJob.createdAt) + && Objects.equals(deletedDocumentCount, connectorSyncJob.deletedDocumentCount) + && Objects.equals(error, connectorSyncJob.error) + && 
Objects.equals(id, connectorSyncJob.id) + && Objects.equals(indexedDocumentCount, connectorSyncJob.indexedDocumentCount) + && Objects.equals(indexedDocumentVolume, connectorSyncJob.indexedDocumentVolume) + && Objects.equals(jobType, connectorSyncJob.jobType) + && Objects.equals(lastSeen, connectorSyncJob.lastSeen) + && Objects.equals(metadata, connectorSyncJob.metadata) + && Objects.equals(startedAt, connectorSyncJob.startedAt) + && Objects.equals(status, connectorSyncJob.status) + && Objects.equals(totalDocumentCount, connectorSyncJob.totalDocumentCount) + && Objects.equals(triggerMethod, connectorSyncJob.triggerMethod) + && Objects.equals(workerHostname, connectorSyncJob.workerHostname); + } + + @Override + public int hashCode() { + return Objects.hash( + cancelationRequestedAt, + canceledAt, + completedAt, + connector, + createdAt, + deletedDocumentCount, + error, + id, + indexedDocumentCount, + indexedDocumentVolume, + jobType, + lastSeen, + metadata, + startedAt, + status, + totalDocumentCount, + triggerMethod, + workerHostname + ); + } + + public static class Builder { + private Instant cancellationRequestedAt; + + private Instant canceledAt; + + private Instant completedAt; + + private Connector connector; + + private Instant createdAt; + + private long deletedDocumentCount; + + private String error; + + private String id; + + private long indexedDocumentCount; + + private long indexedDocumentVolume; + + private ConnectorSyncJobType jobType; + + private Instant lastSeen; + + private Map metadata; + + private Instant startedAt; + + private ConnectorSyncStatus status; + + private long totalDocumentCount; + + private ConnectorSyncJobTriggerMethod triggerMethod; + + private String workerHostname; + + public Builder setCancellationRequestedAt(Instant cancellationRequestedAt) { + this.cancellationRequestedAt = cancellationRequestedAt; + return this; + } + + public Builder setCanceledAt(Instant canceledAt) { + this.canceledAt = canceledAt; + return this; + } + + 
public Builder setCompletedAt(Instant completedAt) { + this.completedAt = completedAt; + return this; + } + + public Builder setConnector(Connector connector) { + this.connector = connector; + return this; + } + + public Builder setCreatedAt(Instant createdAt) { + this.createdAt = createdAt; + return this; + } + + public Builder setDeletedDocumentCount(long deletedDocumentCount) { + this.deletedDocumentCount = deletedDocumentCount; + return this; + } + + public Builder setError(String error) { + this.error = error; + return this; + } + + public Builder setId(String id) { + this.id = id; + return this; + } + + public Builder setIndexedDocumentCount(long indexedDocumentCount) { + this.indexedDocumentCount = indexedDocumentCount; + return this; + } + + public Builder setIndexedDocumentVolume(long indexedDocumentVolume) { + this.indexedDocumentVolume = indexedDocumentVolume; + return this; + } + + public Builder setJobType(ConnectorSyncJobType jobType) { + this.jobType = jobType; + return this; + } + + public Builder setLastSeen(Instant lastSeen) { + this.lastSeen = lastSeen; + return this; + } + + public Builder setMetadata(Map metadata) { + this.metadata = metadata; + return this; + } + + public Builder setStartedAt(Instant startedAt) { + this.startedAt = startedAt; + return this; + } + + public Builder setStatus(ConnectorSyncStatus status) { + this.status = status; + return this; + } + + public Builder setTotalDocumentCount(long totalDocumentCount) { + this.totalDocumentCount = totalDocumentCount; + return this; + } + + public Builder setTriggerMethod(ConnectorSyncJobTriggerMethod triggerMethod) { + this.triggerMethod = triggerMethod; + return this; + } + + public Builder setWorkerHostname(String workerHostname) { + this.workerHostname = workerHostname; + return this; + } + + public ConnectorSyncJob build() { + return new ConnectorSyncJob( + cancellationRequestedAt, + canceledAt, + completedAt, + connector, + createdAt, + deletedDocumentCount, + error, + id, + 
indexedDocumentCount, + indexedDocumentVolume, + jobType, + lastSeen, + metadata, + startedAt, + status, + totalDocumentCount, + triggerMethod, + workerHostname + ); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java new file mode 100644 index 0000000000000..f259cb1e0a8c0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -0,0 +1,159 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFiltering; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; +import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; +import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; +import 
org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; + +/** + * A service that manages persistent {@link ConnectorSyncJob} configurations. + */ +public class ConnectorSyncJobIndexService { + + private static final Long ZERO = 0L; + + private final Client clientWithOrigin; + + public static final String CONNECTOR_SYNC_JOB_INDEX_NAME = ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; + + /** + * @param client A client for executing actions on the connectors sync jobs index. + */ + public ConnectorSyncJobIndexService(Client client) { + this.clientWithOrigin = new OriginSettingClient(client, CONNECTORS_ORIGIN); + } + + /** + * @param request Request for creating a connector sync job. + * @param listener Listener to respond to a successful response or an error. 
+ */ + public void createConnectorSyncJob( + PostConnectorSyncJobAction.Request request, + ActionListener listener + ) { + try { + getSyncJobConnectorInfo(request.getId(), listener.delegateFailure((l, connector) -> { + Instant now = Instant.now(); + ConnectorSyncJobType jobType = Objects.requireNonNullElse(request.getJobType(), ConnectorSyncJob.DEFAULT_JOB_TYPE); + ConnectorSyncJobTriggerMethod triggerMethod = Objects.requireNonNullElse( + request.getTriggerMethod(), + ConnectorSyncJob.DEFAULT_TRIGGER_METHOD + ); + + try { + String syncJobId = generateId(); + + final IndexRequest indexRequest = new IndexRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(syncJobId) + .opType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + ConnectorSyncJob syncJob = new ConnectorSyncJob.Builder().setId(syncJobId) + .setJobType(jobType) + .setTriggerMethod(triggerMethod) + .setStatus(ConnectorSyncJob.DEFAULT_INITIAL_STATUS) + .setConnector(connector) + .setCreatedAt(now) + .setLastSeen(now) + .setTotalDocumentCount(ZERO) + .setIndexedDocumentCount(ZERO) + .setIndexedDocumentVolume(ZERO) + .setDeletedDocumentCount(ZERO) + .build(); + + indexRequest.source(syncJob.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + + clientWithOrigin.index( + indexRequest, + ActionListener.wrap( + indexResponse -> listener.onResponse(new PostConnectorSyncJobAction.Response(indexResponse.getId())), + listener::onFailure + ) + ); + } catch (IOException e) { + listener.onFailure(e); + } + })); + } catch (Exception e) { + listener.onFailure(e); + } + } + + private String generateId() { + /* Workaround: only needed for generating an id upfront, autoGenerateId() has a side effect generating a timestamp, + * which would raise an error on the response layer later ("autoGeneratedTimestamp should not be set externally"). + * TODO: do we even need to copy the "_id" and set it as "id"? 
+ */ + return UUIDs.base64UUID(); + } + + private void getSyncJobConnectorInfo(String connectorId, ActionListener listener) { + try { + + final GetRequest request = new GetRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME, connectorId); + + clientWithOrigin.get(request, new ActionListener<>() { + @Override + public void onResponse(GetResponse response) { + final boolean connectorDoesNotExist = response.isExists() == false; + + if (connectorDoesNotExist) { + onFailure(new ResourceNotFoundException("Connector with id '" + connectorId + "' does not exist.")); + return; + } + + Map source = response.getSource(); + + @SuppressWarnings("unchecked") + final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId( + (String) source.get(Connector.ID_FIELD.getPreferredName()) + ) + .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) + .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) + .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) + .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) + .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) + .setConfiguration((Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName())) + .build(); + + listener.onResponse(syncJobConnectorInfo); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethod.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethod.java new file mode 100644 index 0000000000000..110748795fb77 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethod.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import java.util.Locale; + +public enum ConnectorSyncJobTriggerMethod { + ON_DEMAND, + SCHEDULED; + + public static ConnectorSyncJobTriggerMethod fromString(String triggerMethodString) { + for (ConnectorSyncJobTriggerMethod triggerMethod : ConnectorSyncJobTriggerMethod.values()) { + if (triggerMethod.name().equalsIgnoreCase(triggerMethodString)) { + return triggerMethod; + } + } + + throw new IllegalArgumentException("Unknown trigger method '" + triggerMethodString + "'."); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobType.java new file mode 100644 index 0000000000000..2d0a18da6fec5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobType.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import java.util.Locale; + +public enum ConnectorSyncJobType { + FULL, + INCREMENTAL, + ACCESS_CONTROL; + + public static ConnectorSyncJobType fromString(String syncJobTypeString) { + for (ConnectorSyncJobType syncJobType : ConnectorSyncJobType.values()) { + if (syncJobType.name().equalsIgnoreCase(syncJobTypeString)) { + return syncJobType; + } + } + + throw new IllegalArgumentException("Unknown sync job type '" + syncJobTypeString + "'."); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java new file mode 100644 index 0000000000000..05da4dd798c83 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java @@ -0,0 +1,206 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTriggerMethod; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobType; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.application.EnterpriseSearch.CONNECTOR_API_ENDPOINT; + +public class PostConnectorSyncJobAction extends ActionType { + + public static final PostConnectorSyncJobAction INSTANCE = new PostConnectorSyncJobAction(); + + public static final String NAME = "cluster:admin/xpack/connector/sync_job/post"; + + public static final String CONNECTOR_SYNC_JOB_API_ENDPOINT = CONNECTOR_API_ENDPOINT + "/_sync_job"; + + private PostConnectorSyncJobAction() { + 
super(NAME, PostConnectorSyncJobAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final String EMPTY_CONNECTOR_ID_ERROR_MESSAGE = "[id] of the connector cannot be null or empty"; + private final String id; + private final ConnectorSyncJobType jobType; + private final ConnectorSyncJobTriggerMethod triggerMethod; + + public Request(String id, ConnectorSyncJobType jobType, ConnectorSyncJobTriggerMethod triggerMethod) { + this.id = id; + this.jobType = jobType; + this.triggerMethod = triggerMethod; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + this.jobType = in.readOptionalEnum(ConnectorSyncJobType.class); + this.triggerMethod = in.readOptionalEnum(ConnectorSyncJobTriggerMethod.class); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_sync_job_post_request", + false, + ((args) -> { + String connectorId = (String) args[0]; + String syncJobTypeString = (String) args[1]; + String triggerMethodString = (String) args[2]; + + boolean syncJobTypeSpecified = syncJobTypeString != null; + boolean triggerMethodSpecified = triggerMethodString != null; + + return new Request( + connectorId, + syncJobTypeSpecified ? ConnectorSyncJobType.fromString(syncJobTypeString) : null, + triggerMethodSpecified ? 
ConnectorSyncJobTriggerMethod.fromString(triggerMethodString) : null + ); + }) + ); + + static { + PARSER.declareString(constructorArg(), ConnectorSyncJob.ID_FIELD); + PARSER.declareString(optionalConstructorArg(), ConnectorSyncJob.JOB_TYPE_FIELD); + PARSER.declareString(optionalConstructorArg(), ConnectorSyncJob.TRIGGER_METHOD_FIELD); + } + + public String getId() { + return id; + } + + public ConnectorSyncJobType getJobType() { + return jobType; + } + + public ConnectorSyncJobTriggerMethod getTriggerMethod() { + return triggerMethod; + } + + public static Request fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return Request.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static Request fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("id", id); + builder.field("job_type", jobType); + builder.field("trigger_method", triggerMethod); + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(getId())) { + validationException = addValidationError(EMPTY_CONNECTOR_ID_ERROR_MESSAGE, validationException); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeOptionalEnum(jobType); + out.writeOptionalEnum(triggerMethod); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + Request request = (Request) o; + return Objects.equals(id, request.id) && jobType == request.jobType && triggerMethod == request.triggerMethod; + } + + @Override + public int hashCode() { + return Objects.hash(id, jobType, triggerMethod); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final String id; + + public Response(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + } + + public Response(String id) { + this.id = id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + public String getId() { + return id; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("id", id); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(id, response.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java new file mode 100644 index 0000000000000..2a1b9d15d2451 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestPostConnectorSyncJobAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_sync_job_post_action"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/" + PostConnectorSyncJobAction.CONNECTOR_SYNC_JOB_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + PostConnectorSyncJobAction.Request request = PostConnectorSyncJobAction.Request.fromXContentBytes( + restRequest.content(), + restRequest.getXContentType() + ); + + return channel -> client.execute( + PostConnectorSyncJobAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.CREATED, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobAction.java new file mode 100644 index 0000000000000..73889195d0e08 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportPostConnectorSyncJobAction extends HandledTransportAction< + PostConnectorSyncJobAction.Request, + PostConnectorSyncJobAction.Response> { + + protected final ConnectorSyncJobIndexService syncJobIndexService; + + @Inject + public TransportPostConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + PostConnectorSyncJobAction.NAME, + transportService, + actionFilters, + PostConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.syncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + PostConnectorSyncJobAction.Request request, + ActionListener listener + ) { + syncJobIndexService.createConnectorSyncJob(request, listener); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 52e9919924419..dd8550ea73da0 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -168,6 +168,16 @@ public static ConnectorFiltering getRandomConnectorFiltering() { .build(); } + public static Connector getRandomSyncJobConnectorInfo() { + return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) + .setFiltering(List.of(getRandomConnectorFiltering())) + .setIndexName(randomAlphaOfLength(10)) + .setLanguage(randomAlphaOfLength(10)) + .setServiceType(randomAlphaOfLength(10)) + .setConfiguration(Collections.emptyMap()) + .build(); + } + public static Connector getRandomConnector() { return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) @@ -217,7 +227,7 @@ private static Cron getRandomCronExpression() { ); } - private static ConnectorSyncStatus getRandomSyncStatus() { + public static ConnectorSyncStatus getRandomSyncStatus() { ConnectorSyncStatus[] values = ConnectorSyncStatus.values(); return values[randomInt(values.length - 1)]; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java new file mode 100644 index 0000000000000..309675490ad99 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.junit.Before; + +import java.time.Instant; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { + + private static final String NON_EXISTING_CONNECTOR_ID = "non-existing-connector-id"; + private static final int TIMEOUT_SECONDS = 10; + + private ConnectorSyncJobIndexService connectorSyncJobIndexService; + private Connector connector; + + @Before + public void setup() throws Exception { + connector = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + + final IndexRequest indexRequest = new 
IndexRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connector.getConnectorId()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + ActionFuture index = client().index(indexRequest); + + // wait 10 seconds for connector creation + index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); + } + + public void testCreateConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); + + String id = (String) connectorSyncJobSource.get(ConnectorSyncJob.ID_FIELD.getPreferredName()); + + ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); + ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) + ); + + ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); + ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) + ); + + ConnectorSyncStatus initialStatus = ConnectorSyncStatus.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + + Instant createdNow = Instant.parse((String) connectorSyncJobSource.get(ConnectorSyncJob.CREATED_AT_FIELD.getPreferredName())); + Instant lastSeen = Instant.parse((String) connectorSyncJobSource.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName())); + + Integer totalDocumentCount = (Integer) 
connectorSyncJobSource.get(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName()); + Integer indexedDocumentCount = (Integer) connectorSyncJobSource.get( + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Integer indexedDocumentVolume = (Integer) connectorSyncJobSource.get( + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() + ); + Integer deletedDocumentCount = (Integer) connectorSyncJobSource.get(ConnectorSyncJob.DELETED_DOCUMENT_COUNT.getPreferredName()); + + assertThat(id, notNullValue()); + assertThat(jobType, equalTo(requestJobType)); + assertThat(triggerMethod, equalTo(requestTriggerMethod)); + assertThat(initialStatus, equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); + assertThat(createdNow, equalTo(lastSeen)); + assertThat(totalDocumentCount, equalTo(0)); + assertThat(indexedDocumentCount, equalTo(0)); + assertThat(indexedDocumentVolume, equalTo(0)); + assertThat(deletedDocumentCount, equalTo(0)); + } + + public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( + connector.getConnectorId(), + null, + ConnectorSyncJobTriggerMethod.ON_DEMAND + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + + Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); + ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) + ); + + assertThat(jobType, equalTo(ConnectorSyncJob.DEFAULT_JOB_TYPE)); + } + + public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( + connector.getConnectorId(), + ConnectorSyncJobType.FULL, + null + ); + PostConnectorSyncJobAction.Response 
response = awaitPutConnectorSyncJob(syncJobRequest); + + Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); + ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) + ); + + assertThat(triggerMethod, equalTo(ConnectorSyncJob.DEFAULT_TRIGGER_METHOD)); + } + + public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( + NON_EXISTING_CONNECTOR_ID, + ConnectorSyncJobType.FULL, + ConnectorSyncJobTriggerMethod.ON_DEMAND + ); + awaitPutConnectorSyncJobExpectingException( + syncJobRequest, + ActionListener.wrap(response -> {}, exception -> assertThat(exception.getMessage(), containsString(NON_EXISTING_CONNECTOR_ID))) + ); + } + + private Map getConnectorSyncJobSourceById(String syncJobId) throws ExecutionException, InterruptedException, + TimeoutException { + GetRequest getRequest = new GetRequest(ConnectorSyncJobIndexService.CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId); + ActionFuture getResponseActionFuture = client().get(getRequest); + + return getResponseActionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getSource(); + } + + private void awaitPutConnectorSyncJobExpectingException( + PostConnectorSyncJobAction.Request syncJobRequest, + ActionListener listener + ) throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + + connectorSyncJobIndexService.createConnectorSyncJob(syncJobRequest, new ActionListener<>() { + @Override + public void onResponse(PostConnectorSyncJobAction.Response putConnectorSyncJobResponse) { + fail("Expected an exception and not a successful response"); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + latch.countDown(); + } + }); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, 
TimeUnit.SECONDS); + assertTrue("Timeout waiting for put request", requestTimedOut); + } + + private PostConnectorSyncJobAction.Response awaitPutConnectorSyncJob(PostConnectorSyncJobAction.Request syncJobRequest) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + final AtomicReference responseRef = new AtomicReference<>(null); + final AtomicReference exception = new AtomicReference<>(null); + + connectorSyncJobIndexService.createConnectorSyncJob(syncJobRequest, new ActionListener<>() { + @Override + public void onResponse(PostConnectorSyncJobAction.Response putConnectorSyncJobResponse) { + responseRef.set(putConnectorSyncJobResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + latch.countDown(); + } + }); + + if (exception.get() != null) { + throw exception.get(); + } + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + PostConnectorSyncJobAction.Response response = responseRef.get(); + + assertTrue("Timeout waiting for post request", requestTimedOut); + assertNotNull("Received null response from post request", response); + + return response; + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java new file mode 100644 index 0000000000000..5ce6925ae1cda --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.core.Tuple; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; + +import java.time.Instant; + +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomInstantBetween; +import static org.elasticsearch.test.ESTestCase.randomInt; +import static org.elasticsearch.test.ESTestCase.randomLong; +import static org.elasticsearch.test.ESTestCase.randomMap; + +public class ConnectorSyncJobTestUtils { + + public static ConnectorSyncJob getRandomConnectorSyncJob() { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return new ConnectorSyncJob.Builder().setCancellationRequestedAt( + randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) }) + ) + .setCanceledAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) + .setCompletedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) + .setConnector(ConnectorTestUtils.getRandomSyncJobConnectorInfo()) + .setCreatedAt(randomInstantBetween(lowerBoundInstant, upperBoundInstant)) + .setDeletedDocumentCount(randomLong()) + .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) + .setId(randomAlphaOfLength(10)) + .setIndexedDocumentCount(randomLong()) + .setIndexedDocumentVolume(randomLong()) + .setJobType(getRandomConnectorJobType()) + .setLastSeen(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) + .setMetadata( + randomMap( + 0, + 10, + () -> new 
Tuple<>(randomAlphaOfLength(10), randomFrom(new Object[] { null, randomAlphaOfLength(10), randomLong() })) + ) + ) + .setStartedAt(randomFrom(new Instant[] { null, randomInstantBetween(lowerBoundInstant, upperBoundInstant) })) + .setStatus(ConnectorTestUtils.getRandomSyncStatus()) + .setTotalDocumentCount(randomLong()) + .setTriggerMethod(getRandomConnectorSyncJobTriggerMethod()) + .setWorkerHostname(randomAlphaOfLength(10)) + .build(); + } + + public static ConnectorSyncJobTriggerMethod getRandomConnectorSyncJobTriggerMethod() { + ConnectorSyncJobTriggerMethod[] values = ConnectorSyncJobTriggerMethod.values(); + return values[randomInt(values.length - 1)]; + } + + public static ConnectorSyncJobType getRandomConnectorJobType() { + ConnectorSyncJobType[] values = ConnectorSyncJobType.values(); + return values[randomInt(values.length - 1)]; + } + + public static PostConnectorSyncJobAction.Request getRandomPostConnectorSyncJobActionRequest() { + return new PostConnectorSyncJobAction.Request( + randomAlphaOfLengthBetween(5, 15), + randomFrom(ConnectorSyncJobType.values()), + randomFrom(ConnectorSyncJobTriggerMethod.values()) + ); + } + + public static PostConnectorSyncJobAction.Request getRandomPostConnectorSyncJobActionRequest(String connectorId) { + return new PostConnectorSyncJobAction.Request( + connectorId, + randomFrom(ConnectorSyncJobType.values()), + randomFrom(ConnectorSyncJobTriggerMethod.values()) + ); + } + + public static PostConnectorSyncJobAction.Response getRandomPostConnectorSyncJobActionResponse() { + return new PostConnectorSyncJobAction.Response(randomAlphaOfLength(10)); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java new file mode 100644 index 0000000000000..aeecf582c9ec7 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.Connector; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class ConnectorSyncJobTests extends ESTestCase { + + private NamedWriteableRegistry namedWriteableRegistry; + + @Before + public void registerNamedObjects() { + namedWriteableRegistry = new NamedWriteableRegistry( + List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new)) + ); + } + + public final void testRandomSerialization() throws IOException { + for (int run = 0; run < 10; run++) { + ConnectorSyncJob syncJob = ConnectorSyncJobTestUtils.getRandomConnectorSyncJob(); + assertTransportSerialization(syncJob); + } + } + + private void assertTransportSerialization(ConnectorSyncJob testInstance) throws IOException { + ConnectorSyncJob deserializedInstance = copyInstance(testInstance); + assertNotSame(testInstance, deserializedInstance); + assertThat(testInstance, equalTo(deserializedInstance)); + } + + private ConnectorSyncJob copyInstance(ConnectorSyncJob instance) throws IOException { + return copyWriteable(instance, namedWriteableRegistry, ConnectorSyncJob::new); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethodTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethodTests.java new file mode 100644 index 0000000000000..34b0c2a9b281e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTriggerMethodTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ConnectorSyncJobTriggerMethodTests extends ESTestCase { + + public void testFromString_WithValidTriggerMethodString() { + ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTestUtils.getRandomConnectorSyncJobTriggerMethod(); + + assertThat(ConnectorSyncJobTriggerMethod.fromString(triggerMethod.toString()), equalTo(triggerMethod)); + } + + public void testFromString_WithInvalidTriggerMethodString_ExpectException() { + expectThrows(IllegalArgumentException.class, () -> ConnectorSyncJobTriggerMethod.fromString("invalid string")); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTypeTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTypeTests.java new file mode 100644 index 0000000000000..f716563141edc --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTypeTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class ConnectorSyncJobTypeTests extends ESTestCase { + + public void testFromString_WithValidSyncJobTypeString() { + ConnectorSyncJobType syncJobType = ConnectorSyncJobTestUtils.getRandomConnectorJobType(); + + assertThat(ConnectorSyncJobType.fromString(syncJobType.toString()), equalTo(syncJobType)); + } + + public void testFromString_WithInvalidSyncJobTypeString_ExpectException() { + expectThrows(IllegalArgumentException.class, () -> ConnectorSyncJobType.fromString("invalid sync job type")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..73e6036dd5148 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + PostConnectorSyncJobAction.Request> { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorSyncJobAction.Request::new; + } + + @Override + protected PostConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(); + } + + @Override + protected PostConnectorSyncJobAction.Request mutateInstance(PostConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return PostConnectorSyncJobAction.Request.fromXContent(parser); + } + + @Override + protected PostConnectorSyncJobAction.Request mutateInstanceForVersion( + PostConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..2493781b7325d --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorSyncJobActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + PostConnectorSyncJobAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorSyncJobAction.Response::new; + } + + @Override + protected PostConnectorSyncJobAction.Response createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionResponse(); + } + + @Override + protected PostConnectorSyncJobAction.Response mutateInstance(PostConnectorSyncJobAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorSyncJobAction.Response mutateInstanceForVersion( + PostConnectorSyncJobAction.Response instance, + TransportVersion version + ) { + return instance; + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionTests.java new file mode 100644 index 
0000000000000..0a2f94a5f821e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTriggerMethod; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobType; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PostConnectorSyncJobActionTests extends ESTestCase { + + public void testValidate_WhenConnectorIdIsPresent_ExpectNoValidationError() { + PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorIdIsNull_ExpectValidationError() { + PostConnectorSyncJobAction.Request requestWithMissingConnectorId = new PostConnectorSyncJobAction.Request( + null, + ConnectorSyncJobType.FULL, + ConnectorSyncJobTriggerMethod.ON_DEMAND + ); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(PostConnectorSyncJobAction.Request.EMPTY_CONNECTOR_ID_ERROR_MESSAGE)); + } + +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..2463637ada2dd --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportPostConnectorSyncJobActionTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportPostConnectorSyncJobActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportPostConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService 
= getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportPostConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testPostConnectorSyncJob_ExpectNoWarnings() throws InterruptedException { + PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(PostConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for post request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 99d3c7e78a478..bdfb3bc14286c 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,6 +127,7 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", 
"cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", From 91df20b6132b41f34268b933f883f340821d572b Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Thu, 30 Nov 2023 13:32:20 +0200 Subject: [PATCH 098/263] MockTransportService did not call handle exception (#102766) A failure to send the delayed message would previously not call the handler logic for exceptions. This resulted in some tests complaining about un-subtracted bytes in IndexingPressure. Also adding some logging that helped figuring out the situation. Fixes #92344 --- .../elasticsearch/index/IndexingPressure.java | 5 ++ .../elasticsearch/transport/Transport.java | 49 +++++++------------ .../transport/TransportService.java | 19 ++++++- .../transport/InboundHandlerTests.java | 4 +- .../test/transport/MockTransportService.java | 24 ++++++++- 5 files changed, 64 insertions(+), 37 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index 5a6cd66e2d5c1..d0bc8ad980dde 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -101,12 +102,14 @@ public Releasable markCoordinatingOperationStarted(int operations, long bytes, b false ); } + logger.trace(() -> Strings.format("adding [%d] coordinating operations and [%d] bytes", operations, bytes)); currentCoordinatingBytes.getAndAdd(bytes); currentCoordinatingOps.getAndAdd(operations); 
totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); totalCoordinatingBytes.getAndAdd(bytes); totalCoordinatingOps.getAndAdd(operations); return wrapReleasable(() -> { + logger.trace(() -> Strings.format("removing [%d] coordinating operations and [%d] bytes", operations, bytes)); this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); this.currentCoordinatingBytes.getAndAdd(-bytes); this.currentCoordinatingOps.getAndAdd(-operations); @@ -153,12 +156,14 @@ public Releasable markPrimaryOperationStarted(int operations, long bytes, boolea false ); } + logger.trace(() -> Strings.format("adding [%d] primary operations and [%d] bytes", operations, bytes)); currentPrimaryBytes.getAndAdd(bytes); currentPrimaryOps.getAndAdd(operations); totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); totalPrimaryBytes.getAndAdd(bytes); totalPrimaryOps.getAndAdd(operations); return wrapReleasable(() -> { + logger.trace(() -> Strings.format("removing [%d] primary operations and [%d] bytes", operations, bytes)); this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); this.currentPrimaryBytes.getAndAdd(-bytes); this.currentPrimaryOps.getAndAdd(-operations); diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index e1512056c35d6..5b396daa5d51f 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -106,7 +106,7 @@ interface Connection extends Closeable, RefCounted { /** * Sends the request to the node this connection is associated with - * @param requestId see {@link ResponseHandlers#add(ResponseContext)} for details + * @param requestId see {@link ResponseHandlers#add(TransportResponseHandler, Connection, String)} for details * @param action the action to execute * @param request the request to send * @param options request options to apply @@ -155,35 +155,15 @@ default 
Object getCacheKey() { } /** - * This class represents a response context that encapsulates the actual response handler, the action and the connection it was - * executed on. + * This class represents a response context that encapsulates the actual response handler, the action. the connection it was + * executed on, and the request ID. */ - final class ResponseContext { - - private final TransportResponseHandler handler; - - private final Connection connection; - - private final String action; - - ResponseContext(TransportResponseHandler handler, Connection connection, String action) { - this.handler = handler; - this.connection = connection; - this.action = action; - } - - public TransportResponseHandler handler() { - return handler; - } - - public Connection connection() { - return this.connection; - } - - public String action() { - return this.action; - } - } + record ResponseContext( + TransportResponseHandler handler, + Connection connection, + String action, + long requestId + ) {}; /** * This class is a registry that allows @@ -210,14 +190,19 @@ public ResponseContext remove(long requestId) { /** * Adds a new response context and associates it with a new request ID. 
- * @return the new request ID + * @return the new response context * @see Connection#sendRequest(long, String, TransportRequest, TransportRequestOptions) */ - public long add(ResponseContext holder) { + public ResponseContext add( + TransportResponseHandler handler, + Connection connection, + String action + ) { long requestId = newRequestId(); + ResponseContext holder = new ResponseContext<>(handler, connection, action, requestId); ResponseContext existing = handlers.put(requestId, holder); assert existing == null : "request ID already in use: " + requestId; - return requestId; + return holder; } /** diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 8e6c1e67fcd10..5ce44c74a7a69 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -359,6 +359,10 @@ protected void doStop() { try { final TransportResponseHandler handler = holderToNotify.handler(); final var targetNode = holderToNotify.connection().getNode(); + final long requestId = holderToNotify.requestId(); + if (tracerLog.isTraceEnabled() && shouldTraceAction(holderToNotify.action())) { + tracerLog.trace("[{}][{}] pruning request for node [{}]", requestId, holderToNotify.action(), targetNode); + } assert transport instanceof TcpTransport == false /* other transports (used in tests) may not implement the proper close-connection behaviour. TODO fix this. 
*/ @@ -922,7 +926,7 @@ private void sendRequestInternal( Supplier storedContextSupplier = threadPool.getThreadContext().newRestorableContext(true); ContextRestoreResponseHandler responseHandler = new ContextRestoreResponseHandler<>(storedContextSupplier, handler); // TODO we can probably fold this entire request ID dance into connection.sendRequest but it will be a bigger refactoring - final long requestId = responseHandlers.add(new Transport.ResponseContext<>(responseHandler, connection, action)); + final long requestId = responseHandlers.add(responseHandler, connection, action).requestId(); request.setRequestId(requestId); final TimeoutHandler timeoutHandler; if (options.timeout() != null) { @@ -951,7 +955,7 @@ private void sendRequestInternal( } } - private void handleInternalSendException( + protected void handleInternalSendException( String action, DiscoveryNode node, long requestId, @@ -986,6 +990,9 @@ public void onFailure(Exception e) { @Override protected void doRun() { + if (tracerLog.isTraceEnabled() && shouldTraceAction(action)) { + tracerLog.trace("[{}][{}] failed to send request to node [{}]", requestId, action, node); + } contextToNotify.handler().handleException(sendRequestException); } }); @@ -1301,6 +1308,14 @@ public void onConnectionClosed(Transport.Connection connection) { @Override public void doRun() { for (Transport.ResponseContext holderToNotify : pruned) { + if (tracerLog.isTraceEnabled() && shouldTraceAction(holderToNotify.action())) { + tracerLog.trace( + "[{}][{}] pruning request because connection to node [{}] closed", + holderToNotify.requestId(), + holderToNotify.action(), + connection.getNode() + ); + } holderToNotify.handler().handleException(new NodeDisconnectedException(connection.getNode(), holderToNotify.action())); } } diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java index 191ce130805a8..6ace25021348c 100644 --- 
a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java @@ -137,7 +137,7 @@ public void testRequestAndResponse() throws Exception { AtomicReference exceptionCaptor = new AtomicReference<>(); AtomicReference channelCaptor = new AtomicReference<>(); - long requestId = responseHandlers.add(new Transport.ResponseContext<>(new TransportResponseHandler() { + long requestId = responseHandlers.add(new TransportResponseHandler() { @Override public Executor executor(ThreadPool threadPool) { return TransportResponseHandler.TRANSPORT_WORKER; @@ -157,7 +157,7 @@ public void handleException(TransportException exp) { public TestResponse read(StreamInput in) throws IOException { return new TestResponse(in); } - }, null, action)); + }, null, action).requestId(); RequestHandlerRegistry registry = new RequestHandlerRegistry<>( action, TestRequest::new, diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 90c3b1f062e94..fc048bbe0758f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -551,11 +552,23 @@ public void sendRequest( final RunOnce runnable = new RunOnce(new AbstractRunnable() { @Override public void onFailure(Exception e) { - logger.debug("failed to send delayed request", e); + logger.debug( + () -> Strings.format( + "[%d][%s] failed to send delayed 
request to node [%s]", + requestId, + action, + connection.getNode() + ), + e + ); + handleInternalSendException(action, connection.getNode(), requestId, null, e); } @Override protected void doRun() throws IOException { + logger.debug( + () -> Strings.format("[%d][%s] sending delayed request to node [%s]", requestId, action, connection.getNode()) + ); connection.sendRequest(requestId, action, clonedRequest, options); } }); @@ -566,6 +579,15 @@ protected void doRun() throws IOException { runnable.run(); } else { requestsToSendWhenCleared.add(runnable); + logger.debug( + () -> Strings.format( + "[%d][%s] delaying sending request to node [%s] by [%s]", + requestId, + action, + connection.getNode(), + delay + ) + ); threadPool.schedule(runnable, delay, threadPool.generic()); } } From 45d3d602e0323313dcfbf834413177f2d58a168d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Thu, 30 Nov 2023 12:40:42 +0100 Subject: [PATCH 099/263] [Profiling] Fix null_pointer_exception on missing requested_duration (#102772) --- .../xpack/profiling/TransportGetStackTracesAction.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 7944c421e8135..801ed012de0ee 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -839,7 +839,7 @@ public double getSamplingRate() { return samplingRate; } - public void setRequestedDuration(double requestedDuration) { + public void setRequestedDuration(Double requestedDuration) { this.requestedDuration = requestedDuration; } From fc9991aa4e30aabf06813d066dccd3e9a60bdf08 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 
30 Nov 2023 12:48:40 +0100 Subject: [PATCH 100/263] Disable metrics sending only when the setting was not used explicitly (#102796) RunTaks is used by both statefull and serverless. when --with-apm-server is used, `telemetry.metrics.enabled` is always turned to true and server_url is set to localhost However when --with-apm-server is NOT used, then we should disable `telemetry.metrics.enabled` only if that setting was not explicitly used. This explicte use is for instance when running gradlew run and configuring ES to send to ESS cluster a follow up from https://github.com/elastic/elasticsearch/pull/101941/files#r1388403805 --- .../java/org/elasticsearch/gradle/testclusters/RunTask.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index 477842a201bb9..86df3544ddfc6 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -205,8 +205,9 @@ public void beforeStart() { } catch (IOException e) { logger.warn("Unable to start APM server", e); } - } else { - // metrics are enabled by default, if the --with-apm-server was not used we should disable it + } else if (node.getSettingKeys().contains("telemetry.metrics.enabled") == false) { + // in serverless metrics are enabled by default + // if metrics were not enabled explicitly for gradlew run we should disable them node.setting("telemetry.metrics.enabled", "false"); } From 98b9bd547af1067cbc2b593cc198c47eced62729 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 30 Nov 2023 13:16:35 +0100 Subject: [PATCH 101/263] Mute tests (#102785) --- .../org/elasticsearch/plugins/cli/InstallPluginActionTests.java | 1 + .../elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java | 2 ++ 2 files changed, 3 insertions(+) diff --git 
a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index c088e89338e74..f7882a3fce743 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -118,6 +118,7 @@ import static org.mockito.Mockito.spy; @LuceneTestCase.SuppressFileSystems("*") +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102783") public class InstallPluginActionTests extends ESTestCase { private InstallPluginAction skipJarHellAction; diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java index 31d0a7646e1b7..ffe122b8de222 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java @@ -9,6 +9,7 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -46,6 +47,7 @@ * Tests that run ESQL queries that have, in the past, used so much memory they * crash Elasticsearch. */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102784") public class HeapAttackIT extends ESRestTestCase { /** * This used to fail, but we've since compacted top n so it actually succeeds now. 
From 714aa680f11514ee981682785423bc4d3bb7d383 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Thu, 30 Nov 2023 13:38:58 +0100 Subject: [PATCH 102/263] ESQL: Simplify refCount semantics of Block::expand (#102775) Now that Blocks are refcounted, we can make it so the returned block always has to be closed/decRef'd by the caller, even if it points to the same block. --- .../compute/data/BooleanArrayBlock.java | 1 + .../compute/data/BytesRefArrayBlock.java | 1 + .../compute/data/DoubleArrayBlock.java | 1 + .../compute/data/IntArrayBlock.java | 1 + .../compute/data/LongArrayBlock.java | 1 + .../compute/data/AbstractVectorBlock.java | 1 + .../org/elasticsearch/compute/data/Block.java | 5 +-- .../compute/data/ConstantNullBlock.java | 1 + .../compute/data/X-ArrayBlock.java.st | 1 + .../compute/operator/MvExpandOperator.java | 38 +++++++++---------- .../compute/data/BlockMultiValuedTests.java | 9 +---- 11 files changed, 28 insertions(+), 32 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index ed38d3139dd4a..844a8bc1b7290 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -80,6 +80,7 @@ public ElementType elementType() { @Override public BooleanBlock expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 6aef8fa54b134..2ae412b3867a0 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -83,6 +83,7 @@ public ElementType elementType() { @Override public BytesRefBlock expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index 6a5af2d7ca6de..a8dbb5ba1d963 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -80,6 +80,7 @@ public ElementType elementType() { @Override public DoubleBlock expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 284520a5f3bd6..bc41b83eca375 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -80,6 +80,7 @@ public ElementType elementType() { @Override public IntBlock expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java 
index fccad0ec1f09b..be10a517b7df0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -80,6 +80,7 @@ public ElementType elementType() { @Override public LongBlock expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index 4a019db5e03c0..d33d1a1afda41 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -57,6 +57,7 @@ public final MvOrdering mvOrdering() { @Override public final Block expand() { + incRef(); return this; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 481a914dc89e9..1d8c548d90571 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -143,10 +143,9 @@ default boolean mvSortedAscending() { } /** - * Expand multivalued fields into one row per value. Returns the - * block if there aren't any multivalued fields to expand. + * Expand multivalued fields into one row per value. Returns the same block if there aren't any multivalued + * fields to expand. The returned block needs to be closed by the caller to release the block's resources. */ - // TODO: We should use refcounting instead of either deep copies or returning the same identical block. 
Block expand(); /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 5823a4b98d52c..03cfa2d940efd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -99,6 +99,7 @@ public MvOrdering mvOrdering() { @Override public Block expand() { + incRef(); return this; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 86a8dfc78450d..72123764e9b55 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -101,6 +101,7 @@ $endif$ @Override public $Type$Block expand() { if (firstValueIndexes == null) { + incRef(); return this; } // TODO use reference counting to share the values diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java index 503913dbb67e8..629cacb82a97f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MvExpandOperator.java @@ -92,23 +92,16 @@ public final Page getOutput() { */ logger.trace("starting {}", prev); expandingBlock = prev.getBlock(channel); - if (expandingBlock.mayHaveMultivaluedFields() == false) { - logger.trace("can't have multivalued fields"); - noops++; - Page result = prev; - prev = null; - expandingBlock = null; - return result; - } 
expandedBlock = expandingBlock.expand(); + if (expandedBlock == expandingBlock) { // The expand was a noop - just return the previous page and clear state. logger.trace("expanded to same"); noops++; Page result = prev; prev = null; - expandingBlock = null; - expandedBlock = null; + + releaseAndClearState(); return result; } if (prev.getBlockCount() == 1) { @@ -119,11 +112,10 @@ public final Page getOutput() { */ logger.trace("single block output"); assert channel == 0; - prev.releaseBlocks(); - prev = null; - expandingBlock = null; Page result = new Page(expandedBlock); expandedBlock = null; + + releaseAndClearState(); return result; } } @@ -156,14 +148,7 @@ private Page sliceExpandedIntoPages() { nextItemOnExpanded = 0; } if (prevCompleted) { - Releasables.closeExpectNoException(() -> { - if (prev != null) { - prev.releaseBlocks(); - prev = null; - } - }, expandedBlock); - expandingBlock = null; - expandedBlock = null; + releaseAndClearState(); } return new Page(result); } @@ -206,6 +191,17 @@ private int[] nextDuplicateExpandingFilter() { } } + private void releaseAndClearState() { + Releasables.closeExpectNoException(() -> { + if (prev != null) { + prev.releaseBlocks(); + prev = null; + } + }, expandedBlock); + expandingBlock = null; + expandedBlock = null; + } + @Override public final boolean needsInput() { return prev == null && finished == false; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java index f4cea6b58cc53..1b0e61cea8135 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import 
org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -152,9 +151,7 @@ private int[] randomFilterPositions(Block orig, boolean all, boolean shuffled) { } private void assertExpanded(Block orig) { - Block expanded = null; - try (orig) { - expanded = orig.expand(); + try (orig; Block expanded = orig.expand()) { assertThat(expanded.getPositionCount(), equalTo(orig.getTotalValueCount() + orig.nullValuesCount())); assertThat(expanded.getTotalValueCount(), equalTo(orig.getTotalValueCount())); @@ -172,10 +169,6 @@ private void assertExpanded(Block orig) { assertThat(BasicBlockTests.valuesAtPositions(expanded, np, ++np).get(0), equalTo(List.of(ov))); } } - } finally { - if (expanded != orig) { - Releasables.close(expanded); - } } } From a7d9b763a6aa57964b001712e922bb0148d241c8 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 30 Nov 2023 15:12:31 +0100 Subject: [PATCH 103/263] [Test Triage] Mute GetDataStreamsResponseTests test (#102814) see https://github.com/elastic/elasticsearch/issues/102813 --- .../datastreams/action/GetDataStreamsResponseTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 5ebea62fc596a..39f2c06bc95f7 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -63,6 +63,7 @@ protected Response mutateInstance(Response instance) { } @SuppressWarnings("unchecked") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102813") public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Exception { // we'll test a data 
stream with 3 backing indices and a failure store - two backing indices managed by ILM (having the ILM policy // configured for them) and the remainder without any ILM policy configured From 97770aaab6c87bdf38ff11688e0cb5849f18ef17 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 30 Nov 2023 14:44:04 +0000 Subject: [PATCH 104/263] Change the ClusterService reroute reference to a supplier to match other uses (#102690) --- .../cluster/service/ClusterService.java | 40 +++++++++++++++---- .../elasticsearch/node/NodeConstruction.java | 15 ++++--- .../service/FileSettingsServiceTests.java | 6 +-- .../snapshots/SnapshotResiliencyTests.java | 2 +- 4 files changed, 46 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index abc05874cb36e..67b6d64775dff 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -31,6 +31,8 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; +import java.util.function.Supplier; + public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; @@ -54,13 +56,24 @@ public class ClusterService extends AbstractLifecycleComponent { private final String nodeName; - private RerouteService rerouteService; + private final Supplier rerouteService; public ClusterService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, TaskManager taskManager) { + this(settings, clusterSettings, threadPool, taskManager, () -> { throw new IllegalStateException("RerouteService not provided"); }); + } + + public ClusterService( + Settings settings, + ClusterSettings clusterSettings, + ThreadPool threadPool, + TaskManager taskManager, + Supplier rerouteService + ) { this( settings, clusterSettings, new 
MasterService(settings, clusterSettings, threadPool, taskManager), + rerouteService, new ClusterApplierService(Node.NODE_NAME_SETTING.get(settings), settings, clusterSettings, threadPool) ); } @@ -70,10 +83,27 @@ public ClusterService( ClusterSettings clusterSettings, MasterService masterService, ClusterApplierService clusterApplierService + ) { + this( + settings, + clusterSettings, + masterService, + () -> { throw new IllegalStateException("RerouteService not provided"); }, + clusterApplierService + ); + } + + public ClusterService( + Settings settings, + ClusterSettings clusterSettings, + MasterService masterService, + Supplier rerouteService, + ClusterApplierService clusterApplierService ) { this.settings = settings; this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.masterService = masterService; + this.rerouteService = rerouteService; this.operationRouting = new OperationRouting(settings, clusterSettings); this.clusterSettings = clusterSettings; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); @@ -90,14 +120,8 @@ public synchronized void setNodeConnectionsService(NodeConnectionsService nodeCo clusterApplierService.setNodeConnectionsService(nodeConnectionsService); } - public void setRerouteService(RerouteService rerouteService) { - assert this.rerouteService == null : "RerouteService is already set"; - this.rerouteService = rerouteService; - } - public RerouteService getRerouteService() { - assert this.rerouteService != null : "RerouteService not set"; - return rerouteService; + return rerouteService.get(); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index ca254e20e8b37..0623c3b196e45 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -607,7 +607,8 @@ private void construct( telemetryProvider.getTracer() ); - 
ClusterService clusterService = createClusterService(settingsModule, threadPool, taskManager); + final SetOnce rerouteServiceReference = new SetOnce<>(); + ClusterService clusterService = createClusterService(settingsModule, threadPool, taskManager, rerouteServiceReference::get); clusterService.addStateApplier(scriptService); Supplier documentParsingObserverSupplier = getDocumentParsingObserverSupplier(); @@ -627,7 +628,6 @@ private void construct( SystemIndices systemIndices = createSystemIndices(settings); final SetOnce repositoriesServiceReference = new SetOnce<>(); - final SetOnce rerouteServiceReference = new SetOnce<>(); final ClusterInfoService clusterInfoService = serviceProvider.newClusterInfoService( pluginsService, settings, @@ -656,7 +656,6 @@ private void construct( RerouteService rerouteService = new BatchedRerouteService(clusterService, clusterModule.getAllocationService()::reroute); rerouteServiceReference.set(rerouteService); - clusterService.setRerouteService(rerouteService); clusterInfoService.addListener( new DiskThresholdMonitor( @@ -1075,12 +1074,18 @@ record PluginServiceInstances( postInjection(clusterModule, actionModule, clusterService, transportService, featureService); } - private ClusterService createClusterService(SettingsModule settingsModule, ThreadPool threadPool, TaskManager taskManager) { + private ClusterService createClusterService( + SettingsModule settingsModule, + ThreadPool threadPool, + TaskManager taskManager, + Supplier rerouteService + ) { ClusterService clusterService = new ClusterService( settingsModule.getSettings(), settingsModule.getClusterSettings(), threadPool, - taskManager + taskManager, + rerouteService ); resourcesToClose.add(clusterService); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index c25f6dd7e97c2..96b4df3b856b7 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -62,18 +62,19 @@ public class FileSettingsServiceTests extends ESTestCase { private ThreadPool threadpool; @Before - @SuppressWarnings("unchecked") public void setUp() throws Exception { super.setUp(); threadpool = new TestThreadPool("file_settings_service_tests"); + var reroute = mock(RerouteService.class); clusterService = spy( new ClusterService( Settings.builder().put(NODE_NAME_SETTING.getKey(), "test").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadpool, - new TaskManager(Settings.EMPTY, threadpool, Set.of()) + new TaskManager(Settings.EMPTY, threadpool, Set.of()), + () -> reroute ) ); @@ -83,7 +84,6 @@ public void setUp() throws Exception { .build(); doAnswer((Answer) invocation -> clusterState).when(clusterService).state(); - clusterService.setRerouteService(mock(RerouteService.class)); clusterService.setNodeConnectionsService(mock(NodeConnectionsService.class)); clusterService.getClusterApplierService().setInitialState(clusterState); clusterService.getMasterService().setClusterStatePublisher((e, pl, al) -> { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 6cf4430bfd962..26f91d360db83 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1612,6 +1612,7 @@ private final class TestClusterNode { settings, clusterSettings, masterService, + () -> (reason, priority, listener) -> listener.onResponse(null), new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { @Override protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { @@ -1641,7 
+1642,6 @@ protected void connectToNodesAndWait(ClusterState newClusterState) { } } ); - clusterService.setRerouteService((reason, priority, listener) -> listener.onResponse(null)); recoverySettings = new RecoverySettings(settings, clusterSettings); mockTransport = new DisruptableMockTransport(node, deterministicTaskQueue) { @Override From 058edd408f6c20a78816a40de5a4bc39a9fa6176 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Thu, 30 Nov 2023 16:00:14 +0100 Subject: [PATCH 105/263] [Connector API] Implement update scheduling action (#102799) Add endpoint to update the connector scheduling. --- .../api/connector.update_scheduling.json | 39 ++++ .../331_connector_update_scheduling.yml | 94 ++++++++++ .../xpack/application/EnterpriseSearch.java | 19 +- .../application/connector/Connector.java | 6 +- .../connector/ConnectorIndexService.java | 33 ++++ .../RestUpdateConnectorSchedulingAction.java | 45 +++++ ...nsportUpdateConnectorSchedulingAction.java | 55 ++++++ .../UpdateConnectorSchedulingAction.java | 177 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 46 ++++- .../connector/ConnectorTestUtils.java | 2 +- ...ulingActionRequestBWCSerializingTests.java | 51 +++++ ...lingActionResponseBWCSerializingTests.java | 43 +++++ .../xpack/security/operator/Constants.java | 1 + 13 files changed, 604 insertions(+), 7 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json new file mode 100644 index 0000000000000..8d45e588a75ef --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json @@ -0,0 +1,39 @@ +{ + "connector.update_scheduling": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the scheduling field in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_scheduling", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "An object containing the connector's scheduling configuration.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml new file mode 100644 index 0000000000000..21d588f538fc5 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml @@ -0,0 +1,94 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Scheduling": + - do: + connector.update_scheduling: + connector_id: test-connector + body: + scheduling: + access_control: + enabled: true + interval: 1 0 0 * * ? + full: + enabled: false + interval: 2 0 0 * * ? + incremental: + enabled: false + interval: 3 0 0 * * ? + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { scheduling.access_control.enabled: true } + - match: { scheduling.access_control.interval: "1 0 0 * * ?" } + - match: { scheduling.full.enabled: false } + - match: { scheduling.full.interval: "2 0 0 * * ?" } + - match: { scheduling.incremental.enabled: false } + - match: { scheduling.incremental.interval: "3 0 0 * * ?" } + +--- +"Update Connector Scheduling - 404 status code returned when connector doesn't exist": + - do: + catch: "missing" + connector.update_scheduling: + connector_id: test-non-existent-connector + body: + scheduling: + access_control: + enabled: true + interval: 1 0 0 * * ? + full: + enabled: false + interval: 2 0 0 * * ? + incremental: + enabled: false + interval: 3 0 0 * * ? 
+ +--- +"Update Connector Scheduling - 400 status code returned when required fields are missing": + - do: + catch: "bad_request" + connector.update_scheduling: + connector_id: test-connector + body: + scheduling: + incremental: + enabled: false + interval: 3 0 0 * * ? + +--- +"Update Connector Scheduling - 400 status code returned with wrong CRON expression": + - do: + catch: "bad_request" + connector.update_scheduling: + connector_id: test-connector + body: + scheduling: + access_control: + enabled: true + interval: 61 0 0 * * ? + full: + enabled: false + interval: 2 0 0 * * ? + incremental: + enabled: false + interval: 3 0 0 * * ? + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 819c345392c65..970bc38c0145f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,10 +50,13 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; @@ -170,13 +173,17 @@ protected XPackLicenseState getLicenseState() { if (ConnectorAPIFeature.isEnabled()) { actionHandlers.addAll( List.of( + // Connector API new ActionHandler<>(DeleteConnectorAction.INSTANCE, TransportDeleteConnectorAction.class), new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), - new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class) + new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), + + // SyncJob API + new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class) ) ); - actionHandlers.add(new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class)); } return Collections.unmodifiableList(actionHandlers); @@ -225,13 +232,17 @@ public List getRestHandlers( if (ConnectorAPIFeature.isEnabled()) { restHandlers.addAll( List.of( + // Connector API new RestDeleteConnectorAction(), new RestGetConnectorAction(), new RestListConnectorAction(), - new RestPutConnectorAction() + new RestPutConnectorAction(), + new RestUpdateConnectorSchedulingAction(), + + // SyncJob API + new RestPostConnectorSyncJobAction() ) ); - restHandlers.add(new RestPostConnectorSyncJobAction()); } return Collections.unmodifiableList(restHandlers); diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index bdee310612e18..d4aab30ba89bf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -201,7 +201,7 @@ public Connector(StreamInput in) throws IOException { public static final ParseField LANGUAGE_FIELD = new ParseField("language"); static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); - static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); + public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); public static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); static final ParseField STATUS_FIELD = new ParseField("status"); static final ParseField SYNC_CURSOR_FIELD = new ParseField("sync_cursor"); @@ -447,6 +447,10 @@ public String getConnectorId() { return connectorId; } + public ConnectorScheduling getScheduling() { + return scheduling; + } + public List getFiltering() { return filtering; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index a8c9749d3fbc1..d632a28d3f858 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -20,6 +20,8 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.index.IndexNotFoundException; @@ -29,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import java.util.Arrays; import java.util.Collections; @@ -166,6 +169,36 @@ public void onFailure(Exception e) { } } + /** + * Updates the {@link ConnectorScheduling} property of a {@link Connector}. + * + * @param request The request for updating the connector's scheduling. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = 
Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java new file mode 100644 index 0000000000000..06a6cb527544e --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorSchedulingAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_scheduling_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_scheduling")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorSchedulingAction.Request request = UpdateConnectorSchedulingAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + 
UpdateConnectorSchedulingAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorSchedulingAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java new file mode 100644 index 0000000000000..186edb2328f38 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorSchedulingAction extends HandledTransportAction< + UpdateConnectorSchedulingAction.Request, + UpdateConnectorSchedulingAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorSchedulingAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { 
+ super( + UpdateConnectorSchedulingAction.NAME, + transportService, + actionFilters, + UpdateConnectorSchedulingAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorSchedulingAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorScheduling( + request, + listener.map(r -> new UpdateConnectorSchedulingAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java new file mode 100644 index 0000000000000..eb0e265c44f28 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorScheduling; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorSchedulingAction extends ActionType { + + public static final UpdateConnectorSchedulingAction INSTANCE = new UpdateConnectorSchedulingAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_scheduling"; + + public UpdateConnectorSchedulingAction() { + super(NAME, UpdateConnectorSchedulingAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final ConnectorScheduling scheduling; + + public Request(String connectorId, ConnectorScheduling scheduling) { + this.connectorId = connectorId; + this.scheduling = scheduling; + } + + public Request(StreamInput in) throws 
IOException { + super(in); + this.connectorId = in.readString(); + this.scheduling = in.readOptionalWriteable(ConnectorScheduling::new); + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorScheduling getScheduling() { + return scheduling; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_scheduling_request", + false, + ((args, connectorId) -> new UpdateConnectorSchedulingAction.Request(connectorId, (ConnectorScheduling) args[0])) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> ConnectorScheduling.fromXContent(p), Connector.SCHEDULING_FIELD); + } + + public static UpdateConnectorSchedulingAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorSchedulingAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorSchedulingAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.SCHEDULING_FIELD.getPreferredName(), scheduling); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalWriteable(scheduling); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(scheduling, request.scheduling); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, scheduling); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 71076693c07f8..5d0d539262f10 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -11,8 +11,10 @@ import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; import java.util.ArrayList; @@ -36,7 +38,6 @@ public void setup() { } public void testPutConnector() throws Exception { - Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); @@ -60,6 +61,25 @@ public void testDeleteConnector() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); } + public void testUpdateConnectorScheduling() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorScheduling updatedScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); + + UpdateConnectorSchedulingAction.Request updateSchedulingRequest = new UpdateConnectorSchedulingAction.Request( + connector.getConnectorId(), + updatedScheduling + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -160,4 +180,28 @@ public void onFailure(Exception e) { return resp.get(); } + private 
UpdateResponse awaitUpdateConnectorScheduling(UpdateConnectorSchedulingAction.Request updatedScheduling) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorScheduling(updatedScheduling, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update scheduling request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update scheduling request", resp.get()); + return resp.get(); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index dd8550ea73da0..e1752ed6fb354 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -195,7 +195,7 @@ public static Connector getRandomConnector() { .setPipeline(randomBoolean() ? getRandomConnectorIngestPipeline() : null) .setScheduling(randomBoolean() ? getRandomConnectorScheduling() : null) .setStatus(getRandomConnectorStatus()) - .setSyncCursor(randomBoolean() ? Map.of("foo", "bar") : null) + .setSyncCursor(randomBoolean() ? 
Map.of(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)) : null) .setSyncNow(randomBoolean()) .build(); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..ee2823a27400a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionRequestBWCSerializingTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSchedulingActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorSchedulingAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSchedulingAction.Request::new; + } + + @Override + protected UpdateConnectorSchedulingAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorSchedulingAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorScheduling()); + } + + @Override + protected UpdateConnectorSchedulingAction.Request 
mutateInstance(UpdateConnectorSchedulingAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSchedulingAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorSchedulingAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorSchedulingAction.Request mutateInstanceForVersion( + UpdateConnectorSchedulingAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..a03713fa61a36 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSchedulingActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorSchedulingAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSchedulingAction.Response::new; + } + + @Override + protected UpdateConnectorSchedulingAction.Response createTestInstance() { + return new UpdateConnectorSchedulingAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorSchedulingAction.Response mutateInstance(UpdateConnectorSchedulingAction.Response instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSchedulingAction.Response mutateInstanceForVersion( + UpdateConnectorSchedulingAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index bdfb3bc14286c..fa74b2986550d 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -128,6 +128,7 @@ public class Constants { "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", 
"cluster:admin/xpack/connector/sync_job/post", + "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", From 8e9a88b63d24a3c428e91c14df3ea14ef195c1b3 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 30 Nov 2023 16:01:32 +0100 Subject: [PATCH 106/263] Update gradle wrapper to 8.5 (#102154) Update BuildLayout references --- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../src/main/resources/minimumGradleVersion | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 63721 -> 43462 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- settings.gradle | 3 --- 6 files changed, 7 insertions(+), 10 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 01f330a93e8fa..a7a990ab2a89e 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=f2b9ed0faf8472cbe469255ae6c86eddb77076c75191741b4a462f33128dd419 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-all.zip +distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index fad03000495ca..3d512719cff9b 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -8.4 \ No newline at end of file +8.5 \ No newline at end of file 
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7f93135c49b765f8051ef9d0a6055ff8e46073d8..d64cd4917707c1f8861d8cb53dd15194d4248596 100644 GIT binary patch literal 43462 zcma&NWl&^owk(X(xVyW%ySuwf;qI=D6|RlDJ2cR^yEKh!@I- zp9QeisK*rlxC>+~7Dk4IxIRsKBHqdR9b3+fyL=ynHmIDe&|>O*VlvO+%z5;9Z$|DJ zb4dO}-R=MKr^6EKJiOrJdLnCJn>np?~vU-1sSFgPu;pthGwf}bG z(1db%xwr#x)r+`4AGu$j7~u2MpVs3VpLp|mx&;>`0p0vH6kF+D2CY0fVdQOZ@h;A` z{infNyvmFUiu*XG}RNMNwXrbec_*a3N=2zJ|Wh5z* z5rAX$JJR{#zP>KY**>xHTuw?|-Rg|o24V)74HcfVT;WtQHXlE+_4iPE8QE#DUm%x0 zEKr75ur~W%w#-My3Tj`hH6EuEW+8K-^5P62$7Sc5OK+22qj&Pd1;)1#4tKihi=~8C zHiQSst0cpri6%OeaR`PY>HH_;CPaRNty%WTm4{wDK8V6gCZlG@U3$~JQZ;HPvDJcT1V{ z?>H@13MJcCNe#5z+MecYNi@VT5|&UiN1D4ATT+%M+h4c$t;C#UAs3O_q=GxK0}8%8 z8J(_M9bayxN}69ex4dzM_P3oh@ZGREjVvn%%r7=xjkqxJP4kj}5tlf;QosR=%4L5y zWhgejO=vao5oX%mOHbhJ8V+SG&K5dABn6!WiKl{|oPkq(9z8l&Mm%(=qGcFzI=eLu zWc_oCLyf;hVlB@dnwY98?75B20=n$>u3b|NB28H0u-6Rpl((%KWEBOfElVWJx+5yg z#SGqwza7f}$z;n~g%4HDU{;V{gXIhft*q2=4zSezGK~nBgu9-Q*rZ#2f=Q}i2|qOp z!!y4p)4o=LVUNhlkp#JL{tfkhXNbB=Ox>M=n6soptJw-IDI|_$is2w}(XY>a=H52d z3zE$tjPUhWWS+5h=KVH&uqQS=$v3nRs&p$%11b%5qtF}S2#Pc`IiyBIF4%A!;AVoI zXU8-Rpv!DQNcF~(qQnyyMy=-AN~U>#&X1j5BLDP{?K!%h!;hfJI>$mdLSvktEr*89 zdJHvby^$xEX0^l9g$xW-d?J;L0#(`UT~zpL&*cEh$L|HPAu=P8`OQZV!-}l`noSp_ zQ-1$q$R-gDL)?6YaM!=8H=QGW$NT2SeZlb8PKJdc=F-cT@j7Xags+Pr*jPtlHFnf- zh?q<6;)27IdPc^Wdy-mX%2s84C1xZq9Xms+==F4);O`VUASmu3(RlgE#0+#giLh-& zcxm3_e}n4{%|X zJp{G_j+%`j_q5}k{eW&TlP}J2wtZ2^<^E(O)4OQX8FDp6RJq!F{(6eHWSD3=f~(h} zJXCf7=r<16X{pHkm%yzYI_=VDP&9bmI1*)YXZeB}F? 
z(%QsB5fo*FUZxK$oX~X^69;x~j7ms8xlzpt-T15e9}$4T-pC z6PFg@;B-j|Ywajpe4~bk#S6(fO^|mm1hKOPfA%8-_iGCfICE|=P_~e;Wz6my&)h_~ zkv&_xSAw7AZ%ThYF(4jADW4vg=oEdJGVOs>FqamoL3Np8>?!W#!R-0%2Bg4h?kz5I zKV-rKN2n(vUL%D<4oj@|`eJ>0i#TmYBtYmfla;c!ATW%;xGQ0*TW@PTlGG><@dxUI zg>+3SiGdZ%?5N=8uoLA|$4isK$aJ%i{hECP$bK{J#0W2gQ3YEa zZQ50Stn6hqdfxJ*9#NuSLwKFCUGk@c=(igyVL;;2^wi4o30YXSIb2g_ud$ zgpCr@H0qWtk2hK8Q|&wx)}4+hTYlf;$a4#oUM=V@Cw#!$(nOFFpZ;0lc!qd=c$S}Z zGGI-0jg~S~cgVT=4Vo)b)|4phjStD49*EqC)IPwyeKBLcN;Wu@Aeph;emROAwJ-0< z_#>wVm$)ygH|qyxZaet&(Vf%pVdnvKWJn9`%DAxj3ot;v>S$I}jJ$FLBF*~iZ!ZXE zkvui&p}fI0Y=IDX)mm0@tAd|fEHl~J&K}ZX(Mm3cm1UAuwJ42+AO5@HwYfDH7ipIc zmI;1J;J@+aCNG1M`Btf>YT>~c&3j~Qi@Py5JT6;zjx$cvOQW@3oQ>|}GH?TW-E z1R;q^QFjm5W~7f}c3Ww|awg1BAJ^slEV~Pk`Kd`PS$7;SqJZNj->it4DW2l15}xP6 zoCl$kyEF%yJni0(L!Z&14m!1urXh6Btj_5JYt1{#+H8w?5QI%% zo-$KYWNMJVH?Hh@1n7OSu~QhSswL8x0=$<8QG_zepi_`y_79=nK=_ZP_`Em2UI*tyQoB+r{1QYZCpb?2OrgUw#oRH$?^Tj!Req>XiE#~B|~ z+%HB;=ic+R@px4Ld8mwpY;W^A%8%l8$@B@1m5n`TlKI6bz2mp*^^^1mK$COW$HOfp zUGTz-cN9?BGEp}5A!mDFjaiWa2_J2Iq8qj0mXzk; z66JBKRP{p%wN7XobR0YjhAuW9T1Gw3FDvR5dWJ8ElNYF94eF3ebu+QwKjtvVu4L zI9ip#mQ@4uqVdkl-TUQMb^XBJVLW(-$s;Nq;@5gr4`UfLgF$adIhd?rHOa%D);whv z=;krPp~@I+-Z|r#s3yCH+c1US?dnm+C*)r{m+86sTJusLdNu^sqLrfWed^ndHXH`m zd3#cOe3>w-ga(Dus_^ppG9AC>Iq{y%%CK+Cro_sqLCs{VLuK=dev>OL1dis4(PQ5R zcz)>DjEkfV+MO;~>VUlYF00SgfUo~@(&9$Iy2|G0T9BSP?&T22>K46D zL*~j#yJ?)^*%J3!16f)@Y2Z^kS*BzwfAQ7K96rFRIh>#$*$_Io;z>ux@}G98!fWR@ zGTFxv4r~v)Gsd|pF91*-eaZ3Qw1MH$K^7JhWIdX%o$2kCbvGDXy)a?@8T&1dY4`;L z4Kn+f%SSFWE_rpEpL9bnlmYq`D!6F%di<&Hh=+!VI~j)2mfil03T#jJ_s?}VV0_hp z7T9bWxc>Jm2Z0WMU?`Z$xE74Gu~%s{mW!d4uvKCx@WD+gPUQ zV0vQS(Ig++z=EHN)BR44*EDSWIyT~R4$FcF*VEY*8@l=218Q05D2$|fXKFhRgBIEE zdDFB}1dKkoO^7}{5crKX!p?dZWNz$m>1icsXG2N+((x0OIST9Zo^DW_tytvlwXGpn zs8?pJXjEG;T@qrZi%#h93?FP$!&P4JA(&H61tqQi=opRzNpm zkrG}$^t9&XduK*Qa1?355wd8G2CI6QEh@Ua>AsD;7oRUNLPb76m4HG3K?)wF~IyS3`fXuNM>${?wmB zpVz;?6_(Fiadfd{vUCBM*_kt$+F3J+IojI;9L(gc9n3{sEZyzR9o!_mOwFC#tQ{Q~ 
zP3-`#uK#tP3Q7~Q;4H|wjZHO8h7e4IuBxl&vz2w~D8)w=Wtg31zpZhz%+kzSzL*dV zwp@{WU4i;hJ7c2f1O;7Mz6qRKeASoIv0_bV=i@NMG*l<#+;INk-^`5w@}Dj~;k=|}qM1vq_P z|GpBGe_IKq|LNy9SJhKOQ$c=5L{Dv|Q_lZl=-ky*BFBJLW9&y_C|!vyM~rQx=!vun z?rZJQB5t}Dctmui5i31C_;_}CEn}_W%>oSXtt>@kE1=JW*4*v4tPp;O6 zmAk{)m!)}34pTWg8{i>($%NQ(Tl;QC@J@FfBoc%Gr&m560^kgSfodAFrIjF}aIw)X zoXZ`@IsMkc8_=w%-7`D6Y4e*CG8k%Ud=GXhsTR50jUnm+R*0A(O3UKFg0`K;qp1bl z7``HN=?39ic_kR|^R^~w-*pa?Vj#7|e9F1iRx{GN2?wK!xR1GW!qa=~pjJb-#u1K8 zeR?Y2i-pt}yJq;SCiVHODIvQJX|ZJaT8nO+(?HXbLefulKKgM^B(UIO1r+S=7;kLJ zcH}1J=Px2jsh3Tec&v8Jcbng8;V-`#*UHt?hB(pmOipKwf3Lz8rG$heEB30Sg*2rx zV<|KN86$soN(I!BwO`1n^^uF2*x&vJ$2d$>+`(romzHP|)K_KkO6Hc>_dwMW-M(#S zK(~SiXT1@fvc#U+?|?PniDRm01)f^#55;nhM|wi?oG>yBsa?~?^xTU|fX-R(sTA+5 zaq}-8Tx7zrOy#3*JLIIVsBmHYLdD}!0NP!+ITW+Thn0)8SS!$@)HXwB3tY!fMxc#1 zMp3H?q3eD?u&Njx4;KQ5G>32+GRp1Ee5qMO0lZjaRRu&{W<&~DoJNGkcYF<5(Ab+J zgO>VhBl{okDPn78<%&e2mR{jwVCz5Og;*Z;;3%VvoGo_;HaGLWYF7q#jDX=Z#Ml`H z858YVV$%J|e<1n`%6Vsvq7GmnAV0wW4$5qQ3uR@1i>tW{xrl|ExywIc?fNgYlA?C5 zh$ezAFb5{rQu6i7BSS5*J-|9DQ{6^BVQ{b*lq`xS@RyrsJN?-t=MTMPY;WYeKBCNg z^2|pN!Q^WPJuuO4!|P@jzt&tY1Y8d%FNK5xK(!@`jO2aEA*4 zkO6b|UVBipci?){-Ke=+1;mGlND8)6+P;8sq}UXw2hn;fc7nM>g}GSMWu&v&fqh

    iViYT=fZ(|3Ox^$aWPp4a8h24tD<|8-!aK0lHgL$N7Efw}J zVIB!7=T$U`ao1?upi5V4Et*-lTG0XvExbf!ya{cua==$WJyVG(CmA6Of*8E@DSE%L z`V^$qz&RU$7G5mg;8;=#`@rRG`-uS18$0WPN@!v2d{H2sOqP|!(cQ@ zUHo!d>>yFArLPf1q`uBvY32miqShLT1B@gDL4XoVTK&@owOoD)OIHXrYK-a1d$B{v zF^}8D3Y^g%^cnvScOSJR5QNH+BI%d|;J;wWM3~l>${fb8DNPg)wrf|GBP8p%LNGN# z3EaIiItgwtGgT&iYCFy9-LG}bMI|4LdmmJt@V@% zb6B)1kc=T)(|L@0;wr<>=?r04N;E&ef+7C^`wPWtyQe(*pD1pI_&XHy|0gIGHMekd zF_*M4yi6J&Z4LQj65)S zXwdM{SwUo%3SbPwFsHgqF@V|6afT|R6?&S;lw=8% z3}@9B=#JI3@B*#4s!O))~z zc>2_4Q_#&+5V`GFd?88^;c1i7;Vv_I*qt!_Yx*n=;rj!82rrR2rQ8u5(Ejlo{15P% zs~!{%XJ>FmJ})H^I9bn^Re&38H{xA!0l3^89k(oU;bZWXM@kn$#aoS&Y4l^-WEn-fH39Jb9lA%s*WsKJQl?n9B7_~P z-XM&WL7Z!PcoF6_D>V@$CvUIEy=+Z&0kt{szMk=f1|M+r*a43^$$B^MidrT0J;RI` z(?f!O<8UZkm$_Ny$Hth1J#^4ni+im8M9mr&k|3cIgwvjAgjH z8`N&h25xV#v*d$qBX5jkI|xOhQn!>IYZK7l5#^P4M&twe9&Ey@@GxYMxBZq2e7?`q z$~Szs0!g{2fGcp9PZEt|rdQ6bhAgpcLHPz?f-vB?$dc*!9OL?Q8mn7->bFD2Si60* z!O%y)fCdMSV|lkF9w%x~J*A&srMyYY3{=&$}H zGQ4VG_?$2X(0|vT0{=;W$~icCI{b6W{B!Q8xdGhF|D{25G_5_+%s(46lhvNLkik~R z>nr(&C#5wwOzJZQo9m|U<;&Wk!_#q|V>fsmj1g<6%hB{jGoNUPjgJslld>xmODzGjYc?7JSuA?A_QzjDw5AsRgi@Y|Z0{F{!1=!NES-#*f^s4l0Hu zz468))2IY5dmD9pa*(yT5{EyP^G>@ZWumealS-*WeRcZ}B%gxq{MiJ|RyX-^C1V=0 z@iKdrGi1jTe8Ya^x7yyH$kBNvM4R~`fbPq$BzHum-3Zo8C6=KW@||>zsA8-Y9uV5V z#oq-f5L5}V<&wF4@X@<3^C%ptp6+Ce)~hGl`kwj)bsAjmo_GU^r940Z-|`<)oGnh7 zFF0Tde3>ui?8Yj{sF-Z@)yQd~CGZ*w-6p2U<8}JO-sRsVI5dBji`01W8A&3$?}lxBaC&vn0E$c5tW* zX>5(zzZ=qn&!J~KdsPl;P@bmA-Pr8T*)eh_+Dv5=Ma|XSle6t(k8qcgNyar{*ReQ8 zTXwi=8vr>!3Ywr+BhggHDw8ke==NTQVMCK`$69fhzEFB*4+H9LIvdt-#IbhZvpS}} zO3lz;P?zr0*0$%-Rq_y^k(?I{Mk}h@w}cZpMUp|ucs55bcloL2)($u%mXQw({Wzc~ z;6nu5MkjP)0C(@%6Q_I_vsWrfhl7Zpoxw#WoE~r&GOSCz;_ro6i(^hM>I$8y>`!wW z*U^@?B!MMmb89I}2(hcE4zN2G^kwyWCZp5JG>$Ez7zP~D=J^LMjSM)27_0B_X^C(M z`fFT+%DcKlu?^)FCK>QzSnV%IsXVcUFhFdBP!6~se&xxrIxsvySAWu++IrH;FbcY$ z2DWTvSBRfLwdhr0nMx+URA$j3i7_*6BWv#DXfym?ZRDcX9C?cY9sD3q)uBDR3uWg= z(lUIzB)G$Hr!){>E{s4Dew+tb9kvToZp-1&c?y2wn@Z~(VBhqz`cB;{E4(P3N2*nJ 
z_>~g@;UF2iG{Kt(<1PyePTKahF8<)pozZ*xH~U-kfoAayCwJViIrnqwqO}7{0pHw$ zs2Kx?s#vQr7XZ264>5RNKSL8|Ty^=PsIx^}QqOOcfpGUU4tRkUc|kc7-!Ae6!+B{o~7nFpm3|G5^=0#Bnm6`V}oSQlrX(u%OWnC zoLPy&Q;1Jui&7ST0~#+}I^&?vcE*t47~Xq#YwvA^6^} z`WkC)$AkNub|t@S!$8CBlwbV~?yp&@9h{D|3z-vJXgzRC5^nYm+PyPcgRzAnEi6Q^gslXYRv4nycsy-SJu?lMps-? zV`U*#WnFsdPLL)Q$AmD|0`UaC4ND07+&UmOu!eHruzV|OUox<+Jl|Mr@6~C`T@P%s zW7sgXLF2SSe9Fl^O(I*{9wsFSYb2l%-;&Pi^dpv!{)C3d0AlNY6!4fgmSgj_wQ*7Am7&$z;Jg&wgR-Ih;lUvWS|KTSg!&s_E9_bXBkZvGiC6bFKDWZxsD$*NZ#_8bl zG1P-#@?OQzED7@jlMJTH@V!6k;W>auvft)}g zhoV{7$q=*;=l{O>Q4a@ ziMjf_u*o^PsO)#BjC%0^h>Xp@;5$p{JSYDt)zbb}s{Kbt!T*I@Pk@X0zds6wsefuU zW$XY%yyRGC94=6mf?x+bbA5CDQ2AgW1T-jVAJbm7K(gp+;v6E0WI#kuACgV$r}6L? zd|Tj?^%^*N&b>Dd{Wr$FS2qI#Ucs1yd4N+RBUQiSZGujH`#I)mG&VKoDh=KKFl4=G z&MagXl6*<)$6P}*Tiebpz5L=oMaPrN+caUXRJ`D?=K9!e0f{@D&cZLKN?iNP@X0aF zE(^pl+;*T5qt?1jRC=5PMgV!XNITRLS_=9{CJExaQj;lt!&pdzpK?8p>%Mb+D z?yO*uSung=-`QQ@yX@Hyd4@CI^r{2oiu`%^bNkz+Nkk!IunjwNC|WcqvX~k=><-I3 zDQdbdb|!v+Iz01$w@aMl!R)koD77Xp;eZwzSl-AT zr@Vu{=xvgfq9akRrrM)}=!=xcs+U1JO}{t(avgz`6RqiiX<|hGG1pmop8k6Q+G_mv zJv|RfDheUp2L3=^C=4aCBMBn0aRCU(DQwX-W(RkRwmLeuJYF<0urcaf(=7)JPg<3P zQs!~G)9CT18o!J4{zX{_e}4eS)U-E)0FAt}wEI(c0%HkxgggW;(1E=>J17_hsH^sP z%lT0LGgbUXHx-K*CI-MCrP66UP0PvGqM$MkeLyqHdbgP|_Cm!7te~b8p+e6sQ_3k| zVcwTh6d83ltdnR>D^)BYQpDKlLk3g0Hdcgz2}%qUs9~~Rie)A-BV1mS&naYai#xcZ z(d{8=-LVpTp}2*y)|gR~;qc7fp26}lPcLZ#=JpYcn3AT9(UIdOyg+d(P5T7D&*P}# zQCYplZO5|7+r19%9e`v^vfSS1sbX1c%=w1;oyruXB%Kl$ACgKQ6=qNWLsc=28xJjg zwvsI5-%SGU|3p>&zXVl^vVtQT3o-#$UT9LI@Npz~6=4!>mc431VRNN8od&Ul^+G_kHC`G=6WVWM z%9eWNyy(FTO|A+@x}Ou3CH)oi;t#7rAxdIXfNFwOj_@Y&TGz6P_sqiB`Q6Lxy|Q{`|fgmRG(k+!#b*M+Z9zFce)f-7;?Km5O=LHV9f9_87; zF7%R2B+$?@sH&&-$@tzaPYkw0;=i|;vWdI|Wl3q_Zu>l;XdIw2FjV=;Mq5t1Q0|f< zs08j54Bp`3RzqE=2enlkZxmX6OF+@|2<)A^RNQpBd6o@OXl+i)zO%D4iGiQNuXd+zIR{_lb96{lc~bxsBveIw6umhShTX+3@ZJ=YHh@ zWY3(d0azg;7oHn>H<>?4@*RQbi>SmM=JrHvIG(~BrvI)#W(EAeO6fS+}mxxcc+X~W6&YVl86W9WFSS}Vz-f9vS?XUDBk)3TcF z8V?$4Q)`uKFq>xT=)Y9mMFVTUk*NIA!0$?RP6Ig0TBmUFrq*Q-Agq~DzxjStQyJ({ 
zBeZ;o5qUUKg=4Hypm|}>>L=XKsZ!F$yNTDO)jt4H0gdQ5$f|d&bnVCMMXhNh)~mN z@_UV6D7MVlsWz+zM+inZZp&P4fj=tm6fX)SG5H>OsQf_I8c~uGCig$GzuwViK54bcgL;VN|FnyQl>Ed7(@>=8$a_UKIz|V6CeVSd2(P z0Uu>A8A+muM%HLFJQ9UZ5c)BSAv_zH#1f02x?h9C}@pN@6{>UiAp>({Fn(T9Q8B z^`zB;kJ5b`>%dLm+Ol}ty!3;8f1XDSVX0AUe5P#@I+FQ-`$(a;zNgz)4x5hz$Hfbg z!Q(z26wHLXko(1`;(BAOg_wShpX0ixfWq3ponndY+u%1gyX)_h=v1zR#V}#q{au6; z!3K=7fQwnRfg6FXtNQmP>`<;!N137paFS%y?;lb1@BEdbvQHYC{976l`cLqn;b8lp zIDY>~m{gDj(wfnK!lpW6pli)HyLEiUrNc%eXTil|F2s(AY+LW5hkKb>TQ3|Q4S9rr zpDs4uK_co6XPsn_z$LeS{K4jFF`2>U`tbgKdyDne`xmR<@6AA+_hPNKCOR-Zqv;xk zu5!HsBUb^!4uJ7v0RuH-7?l?}b=w5lzzXJ~gZcxRKOovSk@|#V+MuX%Y+=;14i*%{)_gSW9(#4%)AV#3__kac1|qUy!uyP{>?U#5wYNq}y$S9pCc zFc~4mgSC*G~j0u#qqp9 z${>3HV~@->GqEhr_Xwoxq?Hjn#=s2;i~g^&Hn|aDKpA>Oc%HlW(KA1?BXqpxB;Ydx)w;2z^MpjJ(Qi(X!$5RC z*P{~%JGDQqojV>2JbEeCE*OEu!$XJ>bWA9Oa_Hd;y)F%MhBRi*LPcdqR8X`NQ&1L# z5#9L*@qxrx8n}LfeB^J{%-?SU{FCwiWyHp682F+|pa+CQa3ZLzBqN1{)h4d6+vBbV zC#NEbQLC;}me3eeYnOG*nXOJZEU$xLZ1<1Y=7r0(-U0P6-AqwMAM`a(Ed#7vJkn6plb4eI4?2y3yOTGmmDQ!z9`wzbf z_OY#0@5=bnep;MV0X_;;SJJWEf^E6Bd^tVJ9znWx&Ks8t*B>AM@?;D4oWUGc z!H*`6d7Cxo6VuyS4Eye&L1ZRhrRmN6Lr`{NL(wDbif|y&z)JN>Fl5#Wi&mMIr5i;x zBx}3YfF>>8EC(fYnmpu~)CYHuHCyr5*`ECap%t@y=jD>!_%3iiE|LN$mK9>- zHdtpy8fGZtkZF?%TW~29JIAfi2jZT8>OA7=h;8T{{k?c2`nCEx9$r zS+*&vt~2o^^J+}RDG@+9&M^K*z4p{5#IEVbz`1%`m5c2};aGt=V?~vIM}ZdPECDI)47|CWBCfDWUbxBCnmYivQ*0Nu_xb*C>~C9(VjHM zxe<*D<#dQ8TlpMX2c@M<9$w!RP$hpG4cs%AI){jp*Sj|*`m)5(Bw*A0$*i-(CA5#%>a)$+jI2C9r6|(>J8InryENI z$NohnxDUB;wAYDwrb*!N3noBTKPpPN}~09SEL18tkG zxgz(RYU_;DPT{l?Q$+eaZaxnsWCA^ds^0PVRkIM%bOd|G2IEBBiz{&^JtNsODs;5z zICt_Zj8wo^KT$7Bg4H+y!Df#3mbl%%?|EXe!&(Vmac1DJ*y~3+kRKAD=Ovde4^^%~ zw<9av18HLyrf*_>Slp;^i`Uy~`mvBjZ|?Ad63yQa#YK`4+c6;pW4?XIY9G1(Xh9WO8{F-Aju+nS9Vmv=$Ac0ienZ+p9*O%NG zMZKy5?%Z6TAJTE?o5vEr0r>f>hb#2w2U3DL64*au_@P!J!TL`oH2r*{>ffu6|A7tv zL4juf$DZ1MW5ZPsG!5)`k8d8c$J$o;%EIL0va9&GzWvkS%ZsGb#S(?{!UFOZ9<$a| zY|a+5kmD5N&{vRqkgY>aHsBT&`rg|&kezoD)gP0fsNYHsO#TRc_$n6Lf1Z{?+DLziXlHrq4sf(!>O{?Tj;Eh@%)+nRE_2VxbN&&%%caU#JDU%vL3}Cb 
zsb4AazPI{>8H&d=jUaZDS$-0^AxE@utGs;-Ez_F(qC9T=UZX=>ok2k2 ziTn{K?y~a5reD2A)P${NoI^>JXn>`IeArow(41c-Wm~)wiryEP(OS{YXWi7;%dG9v zI?mwu1MxD{yp_rrk!j^cKM)dc4@p4Ezyo%lRN|XyD}}>v=Xoib0gOcdXrQ^*61HNj z=NP|pd>@yfvr-=m{8$3A8TQGMTE7g=z!%yt`8`Bk-0MMwW~h^++;qyUP!J~ykh1GO z(FZ59xuFR$(WE;F@UUyE@Sp>`aVNjyj=Ty>_Vo}xf`e7`F;j-IgL5`1~-#70$9_=uBMq!2&1l zomRgpD58@)YYfvLtPW}{C5B35R;ZVvB<<#)x%srmc_S=A7F@DW8>QOEGwD6suhwCg z>Pa+YyULhmw%BA*4yjDp|2{!T98~<6Yfd(wo1mQ!KWwq0eg+6)o1>W~f~kL<-S+P@$wx*zeI|1t7z#Sxr5 zt6w+;YblPQNplq4Z#T$GLX#j6yldXAqj>4gAnnWtBICUnA&-dtnlh=t0Ho_vEKwV` z)DlJi#!@nkYV#$!)@>udAU*hF?V`2$Hf=V&6PP_|r#Iv*J$9)pF@X3`k;5})9^o4y z&)~?EjX5yX12O(BsFy-l6}nYeuKkiq`u9145&3Ssg^y{5G3Pse z9w(YVa0)N-fLaBq1`P!_#>SS(8fh_5!f{UrgZ~uEdeMJIz7DzI5!NHHqQtm~#CPij z?=N|J>nPR6_sL7!f4hD_|KH`vf8(Wpnj-(gPWH+ZvID}%?~68SwhPTC3u1_cB`otq z)U?6qo!ZLi5b>*KnYHWW=3F!p%h1;h{L&(Q&{qY6)_qxNfbP6E3yYpW!EO+IW3?@J z);4>g4gnl^8klu7uA>eGF6rIGSynacogr)KUwE_R4E5Xzi*Qir@b-jy55-JPC8c~( zo!W8y9OGZ&`xmc8;=4-U9=h{vCqfCNzYirONmGbRQlR`WWlgnY+1wCXbMz&NT~9*| z6@FrzP!LX&{no2!Ln_3|I==_4`@}V?4a;YZKTdw;vT<+K+z=uWbW(&bXEaWJ^W8Td z-3&1bY^Z*oM<=M}LVt>_j+p=2Iu7pZmbXrhQ_k)ysE9yXKygFNw$5hwDn(M>H+e1&9BM5!|81vd%r%vEm zqxY3?F@fb6O#5UunwgAHR9jp_W2zZ}NGp2%mTW@(hz7$^+a`A?mb8|_G*GNMJ) zjqegXQio=i@AINre&%ofexAr95aop5C+0MZ0m-l=MeO8m3epm7U%vZB8+I+C*iNFM z#T3l`gknX;D$-`2XT^Cg*vrv=RH+P;_dfF++cP?B_msQI4j+lt&rX2)3GaJx%W*Nn zkML%D{z5tpHH=dksQ*gzc|}gzW;lwAbxoR07VNgS*-c3d&8J|;@3t^ zVUz*J*&r7DFRuFVDCJDK8V9NN5hvpgGjwx+5n)qa;YCKe8TKtdnh{I7NU9BCN!0dq zczrBk8pE{{@vJa9ywR@mq*J=v+PG;?fwqlJVhijG!3VmIKs>9T6r7MJpC)m!Tc#>g zMtVsU>wbwFJEfwZ{vB|ZlttNe83)$iz`~#8UJ^r)lJ@HA&G#}W&ZH*;k{=TavpjWE z7hdyLZPf*X%Gm}i`Y{OGeeu^~nB8=`{r#TUrM-`;1cBvEd#d!kPqIgYySYhN-*1;L z^byj%Yi}Gx)Wnkosi337BKs}+5H5dth1JA{Ir-JKN$7zC)*}hqeoD(WfaUDPT>0`- z(6sa0AoIqASwF`>hP}^|)a_j2s^PQn*qVC{Q}htR z5-)duBFXT_V56-+UohKXlq~^6uf!6sA#ttk1o~*QEy_Y-S$gAvq47J9Vtk$5oA$Ct zYhYJ@8{hsC^98${!#Ho?4y5MCa7iGnfz}b9jE~h%EAAv~Qxu)_rAV;^cygV~5r_~?l=B`zObj7S=H=~$W zPtI_m%g$`kL_fVUk9J@>EiBH 
zOO&jtn~&`hIFMS5S`g8w94R4H40mdNUH4W@@XQk1sr17b{@y|JB*G9z1|CrQjd+GX z6+KyURG3;!*BQrentw{B2R&@2&`2}n(z-2&X7#r!{yg@Soy}cRD~j zj9@UBW+N|4HW4AWapy4wfUI- zZ`gSL6DUlgj*f1hSOGXG0IVH8HxK?o2|3HZ;KW{K+yPAlxtb)NV_2AwJm|E)FRs&& z=c^e7bvUsztY|+f^k7NXs$o1EUq>cR7C0$UKi6IooHWlK_#?IWDkvywnzg&ThWo^? z2O_N{5X39#?eV9l)xI(>@!vSB{DLt*oY!K1R8}_?%+0^C{d9a%N4 zoxHVT1&Lm|uDX%$QrBun5e-F`HJ^T$ zmzv)p@4ZHd_w9!%Hf9UYNvGCw2TTTbrj9pl+T9%-_-}L(tES>Or-}Z4F*{##n3~L~TuxjirGuIY#H7{%$E${?p{Q01 zi6T`n;rbK1yIB9jmQNycD~yZq&mbIsFWHo|ZAChSFPQa<(%d8mGw*V3fh|yFoxOOiWJd(qvVb!Z$b88cg->N=qO*4k~6;R==|9ihg&riu#P~s4Oap9O7f%crSr^rljeIfXDEg>wi)&v*a%7zpz<9w z*r!3q9J|390x`Zk;g$&OeN&ctp)VKRpDSV@kU2Q>jtok($Y-*x8_$2piTxun81@vt z!Vj?COa0fg2RPXMSIo26T=~0d`{oGP*eV+$!0I<(4azk&Vj3SiG=Q!6mX0p$z7I}; z9BJUFgT-K9MQQ-0@Z=^7R<{bn2Fm48endsSs`V7_@%8?Bxkqv>BDoVcj?K#dV#uUP zL1ND~?D-|VGKe3Rw_7-Idpht>H6XRLh*U7epS6byiGvJpr%d}XwfusjH9g;Z98H`x zyde%%5mhGOiL4wljCaWCk-&uE4_OOccb9c!ZaWt4B(wYl!?vyzl%7n~QepN&eFUrw zFIOl9c({``6~QD+43*_tzP{f2x41h(?b43^y6=iwyB)2os5hBE!@YUS5?N_tXd=h( z)WE286Fbd>R4M^P{!G)f;h<3Q>Fipuy+d2q-)!RyTgt;wr$(?9ox3;q+{E*ZQHhOn;lM`cjnu9 zXa48ks-v(~b*;MAI<>YZH(^NV8vjb34beE<_cwKlJoR;k6lJNSP6v}uiyRD?|0w+X@o1ONrH8a$fCxXpf? 
z?$DL0)7|X}Oc%h^zrMKWc-NS9I0Utu@>*j}b@tJ=ixQSJ={4@854wzW@E>VSL+Y{i z#0b=WpbCZS>kUCO_iQz)LoE>P5LIG-hv9E+oG}DtlIDF>$tJ1aw9^LuhLEHt?BCj& z(O4I8v1s#HUi5A>nIS-JK{v!7dJx)^Yg%XjNmlkWAq2*cv#tHgz`Y(bETc6CuO1VkN^L-L3j_x<4NqYb5rzrLC-7uOv z!5e`GZt%B782C5-fGnn*GhDF$%(qP<74Z}3xx+{$4cYKy2ikxI7B2N+2r07DN;|-T->nU&!=Cm#rZt%O_5c&1Z%nlWq3TKAW0w zQqemZw_ue--2uKQsx+niCUou?HjD`xhEjjQd3%rrBi82crq*~#uA4+>vR<_S{~5ce z-2EIl?~s z1=GVL{NxP1N3%=AOaC}j_Fv=ur&THz zyO!d9kHq|c73kpq`$+t+8Bw7MgeR5~`d7ChYyGCBWSteTB>8WAU(NPYt2Dk`@#+}= zI4SvLlyk#pBgVigEe`?NG*vl7V6m+<}%FwPV=~PvvA)=#ths==DRTDEYh4V5}Cf$z@#;< zyWfLY_5sP$gc3LLl2x+Ii)#b2nhNXJ{R~vk`s5U7Nyu^3yFg&D%Txwj6QezMX`V(x z=C`{76*mNb!qHHs)#GgGZ_7|vkt9izl_&PBrsu@}L`X{95-2jf99K)0=*N)VxBX2q z((vkpP2RneSIiIUEnGb?VqbMb=Zia+rF~+iqslydE34cSLJ&BJW^3knX@M;t*b=EA zNvGzv41Ld_T+WT#XjDB840vovUU^FtN_)G}7v)1lPetgpEK9YS^OWFkPoE{ovj^=@ zO9N$S=G$1ecndT_=5ehth2Lmd1II-PuT~C9`XVePw$y8J#dpZ?Tss<6wtVglm(Ok7 z3?^oi@pPio6l&!z8JY(pJvG=*pI?GIOu}e^EB6QYk$#FJQ%^AIK$I4epJ+9t?KjqA+bkj&PQ*|vLttme+`9G=L% ziadyMw_7-M)hS(3E$QGNCu|o23|%O+VN7;Qggp?PB3K-iSeBa2b}V4_wY`G1Jsfz4 z9|SdB^;|I8E8gWqHKx!vj_@SMY^hLEIbSMCuE?WKq=c2mJK z8LoG-pnY!uhqFv&L?yEuxo{dpMTsmCn)95xanqBrNPTgXP((H$9N${Ow~Is-FBg%h z53;|Y5$MUN)9W2HBe2TD`ct^LHI<(xWrw}$qSoei?}s)&w$;&!14w6B6>Yr6Y8b)S z0r71`WmAvJJ`1h&poLftLUS6Ir zC$bG9!Im_4Zjse)#K=oJM9mHW1{%l8sz$1o?ltdKlLTxWWPB>Vk22czVt|1%^wnN@*!l)}?EgtvhC>vlHm^t+ogpgHI1_$1ox9e;>0!+b(tBrmXRB`PY1vp-R**8N7 zGP|QqI$m(Rdu#=(?!(N}G9QhQ%o!aXE=aN{&wtGP8|_qh+7a_j_sU5|J^)vxq;# zjvzLn%_QPHZZIWu1&mRAj;Sa_97p_lLq_{~j!M9N^1yp3U_SxRqK&JnR%6VI#^E12 z>CdOVI^_9aPK2eZ4h&^{pQs}xsijXgFYRIxJ~N7&BB9jUR1fm!(xl)mvy|3e6-B3j zJn#ajL;bFTYJ2+Q)tDjx=3IklO@Q+FFM}6UJr6km7hj7th9n_&JR7fnqC!hTZoM~T zBeaVFp%)0cbPhejX<8pf5HyRUj2>aXnXBqDJe73~J%P(2C?-RT{c3NjE`)om! 
zl$uewSgWkE66$Kb34+QZZvRn`fob~Cl9=cRk@Es}KQm=?E~CE%spXaMO6YmrMl%9Q zlA3Q$3|L1QJ4?->UjT&CBd!~ru{Ih^in&JXO=|<6J!&qp zRe*OZ*cj5bHYlz!!~iEKcuE|;U4vN1rk$xq6>bUWD*u(V@8sG^7>kVuo(QL@Ki;yL zWC!FT(q{E8#on>%1iAS0HMZDJg{Z{^!De(vSIq&;1$+b)oRMwA3nc3mdTSG#3uYO_ z>+x;7p4I;uHz?ZB>dA-BKl+t-3IB!jBRgdvAbW!aJ(Q{aT>+iz?91`C-xbe)IBoND z9_Xth{6?(y3rddwY$GD65IT#f3<(0o#`di{sh2gm{dw*#-Vnc3r=4==&PU^hCv$qd zjw;>i&?L*Wq#TxG$mFIUf>eK+170KG;~+o&1;Tom9}}mKo23KwdEM6UonXgc z!6N(@k8q@HPw{O8O!lAyi{rZv|DpgfU{py+j(X_cwpKqcalcqKIr0kM^%Br3SdeD> zHSKV94Yxw;pjzDHo!Q?8^0bb%L|wC;4U^9I#pd5O&eexX+Im{ z?jKnCcsE|H?{uGMqVie_C~w7GX)kYGWAg%-?8|N_1#W-|4F)3YTDC+QSq1s!DnOML3@d`mG%o2YbYd#jww|jD$gotpa)kntakp#K;+yo-_ZF9qrNZw<%#C zuPE@#3RocLgPyiBZ+R_-FJ_$xP!RzWm|aN)S+{$LY9vvN+IW~Kf3TsEIvP+B9Mtm! zpfNNxObWQpLoaO&cJh5>%slZnHl_Q~(-Tfh!DMz(dTWld@LG1VRF`9`DYKhyNv z2pU|UZ$#_yUx_B_|MxUq^glT}O5Xt(Vm4Mr02><%C)@v;vPb@pT$*yzJ4aPc_FZ3z z3}PLoMBIM>q_9U2rl^sGhk1VUJ89=*?7|v`{!Z{6bqFMq(mYiA?%KbsI~JwuqVA9$H5vDE+VocjX+G^%bieqx->s;XWlKcuv(s%y%D5Xbc9+ zc(_2nYS1&^yL*ey664&4`IoOeDIig}y-E~_GS?m;D!xv5-xwz+G`5l6V+}CpeJDi^ z%4ed$qowm88=iYG+(`ld5Uh&>Dgs4uPHSJ^TngXP_V6fPyl~>2bhi20QB%lSd#yYn zO05?KT1z@?^-bqO8Cg`;ft>ilejsw@2%RR7;`$Vs;FmO(Yr3Fp`pHGr@P2hC%QcA|X&N2Dn zYf`MqXdHi%cGR@%y7Rg7?d3?an){s$zA{!H;Ie5exE#c~@NhQUFG8V=SQh%UxUeiV zd7#UcYqD=lk-}sEwlpu&H^T_V0{#G?lZMxL7ih_&{(g)MWBnCZxtXg znr#}>U^6!jA%e}@Gj49LWG@*&t0V>Cxc3?oO7LSG%~)Y5}f7vqUUnQ;STjdDU}P9IF9d9<$;=QaXc zL1^X7>fa^jHBu_}9}J~#-oz3Oq^JmGR#?GO7b9a(=R@fw@}Q{{@`Wy1vIQ#Bw?>@X z-_RGG@wt|%u`XUc%W{J z>iSeiz8C3H7@St3mOr_mU+&bL#Uif;+Xw-aZdNYUpdf>Rvu0i0t6k*}vwU`XNO2he z%miH|1tQ8~ZK!zmL&wa3E;l?!!XzgV#%PMVU!0xrDsNNZUWKlbiOjzH-1Uoxm8E#r`#2Sz;-o&qcqB zC-O_R{QGuynW14@)7&@yw1U}uP(1cov)twxeLus0s|7ayrtT8c#`&2~Fiu2=R;1_4bCaD=*E@cYI>7YSnt)nQc zohw5CsK%m?8Ack)qNx`W0_v$5S}nO|(V|RZKBD+btO?JXe|~^Qqur%@eO~<8-L^9d z=GA3-V14ng9L29~XJ>a5k~xT2152zLhM*@zlp2P5Eu}bywkcqR;ISbas&#T#;HZSf z2m69qTV(V@EkY(1Dk3`}j)JMo%ZVJ*5eB zYOjIisi+igK0#yW*gBGj?@I{~mUOvRFQR^pJbEbzFxTubnrw(Muk%}jI+vXmJ;{Q6 
zrSobKD>T%}jV4Ub?L1+MGOD~0Ir%-`iTnWZN^~YPrcP5y3VMAzQ+&en^VzKEb$K!Q z<7Dbg&DNXuow*eD5yMr+#08nF!;%4vGrJI++5HdCFcGLfMW!KS*Oi@=7hFwDG!h2< zPunUEAF+HncQkbfFj&pbzp|MU*~60Z(|Ik%Tn{BXMN!hZOosNIseT?R;A`W?=d?5X zK(FB=9mZusYahp|K-wyb={rOpdn=@;4YI2W0EcbMKyo~-#^?h`BA9~o285%oY zfifCh5Lk$SY@|2A@a!T2V+{^!psQkx4?x0HSV`(w9{l75QxMk!)U52Lbhn{8ol?S) zCKo*7R(z!uk<6*qO=wh!Pul{(qq6g6xW;X68GI_CXp`XwO zxuSgPRAtM8K7}5E#-GM!*ydOOG_{A{)hkCII<|2=ma*71ci_-}VPARm3crFQjLYV! z9zbz82$|l01mv`$WahE2$=fAGWkd^X2kY(J7iz}WGS z@%MyBEO=A?HB9=^?nX`@nh;7;laAjs+fbo!|K^mE!tOB>$2a_O0y-*uaIn8k^6Y zSbuv;5~##*4Y~+y7Z5O*3w4qgI5V^17u*ZeupVGH^nM&$qmAk|anf*>r zWc5CV;-JY-Z@Uq1Irpb^O`L_7AGiqd*YpGUShb==os$uN3yYvb`wm6d=?T*it&pDk zo`vhw)RZX|91^^Wa_ti2zBFyWy4cJu#g)_S6~jT}CC{DJ_kKpT`$oAL%b^!2M;JgT zM3ZNbUB?}kP(*YYvXDIH8^7LUxz5oE%kMhF!rnPqv!GiY0o}NR$OD=ITDo9r%4E>E0Y^R(rS^~XjWyVI6 zMOR5rPXhTp*G*M&X#NTL`Hu*R+u*QNoiOKg4CtNPrjgH>c?Hi4MUG#I917fx**+pJfOo!zFM&*da&G_x)L(`k&TPI*t3e^{crd zX<4I$5nBQ8Ax_lmNRa~E*zS-R0sxkz`|>7q_?*e%7bxqNm3_eRG#1ae3gtV9!fQpY z+!^a38o4ZGy9!J5sylDxZTx$JmG!wg7;>&5H1)>f4dXj;B+@6tMlL=)cLl={jLMxY zbbf1ax3S4>bwB9-$;SN2?+GULu;UA-35;VY*^9Blx)Jwyb$=U!D>HhB&=jSsd^6yw zL)?a|>GxU!W}ocTC(?-%z3!IUhw^uzc`Vz_g>-tv)(XA#JK^)ZnC|l1`@CdX1@|!| z_9gQ)7uOf?cR@KDp97*>6X|;t@Y`k_N@)aH7gY27)COv^P3ya9I{4z~vUjLR9~z1Z z5=G{mVtKH*&$*t0@}-i_v|3B$AHHYale7>E+jP`ClqG%L{u;*ff_h@)al?RuL7tOO z->;I}>%WI{;vbLP3VIQ^iA$4wl6@0sDj|~112Y4OFjMs`13!$JGkp%b&E8QzJw_L5 zOnw9joc0^;O%OpF$Qp)W1HI!$4BaXX84`%@#^dk^hFp^pQ@rx4g(8Xjy#!X%+X5Jd@fs3amGT`}mhq#L97R>OwT5-m|h#yT_-v@(k$q7P*9X~T*3)LTdzP!*B} z+SldbVWrrwQo9wX*%FyK+sRXTa@O?WM^FGWOE?S`R(0P{<6p#f?0NJvnBia?k^fX2 zNQs7K-?EijgHJY}&zsr;qJ<*PCZUd*x|dD=IQPUK_nn)@X4KWtqoJNHkT?ZWL_hF? 
zS8lp2(q>;RXR|F;1O}EE#}gCrY~#n^O`_I&?&z5~7N;zL0)3Tup`%)oHMK-^r$NT% zbFg|o?b9w(q@)6w5V%si<$!U<#}s#x@0aX-hP>zwS#9*75VXA4K*%gUc>+yzupTDBOKH8WR4V0pM(HrfbQ&eJ79>HdCvE=F z|J>s;;iDLB^3(9}?biKbxf1$lI!*Z%*0&8UUq}wMyPs_hclyQQi4;NUY+x2qy|0J; zhn8;5)4ED1oHwg+VZF|80<4MrL97tGGXc5Sw$wAI#|2*cvQ=jB5+{AjMiDHmhUC*a zlmiZ`LAuAn_}hftXh;`Kq0zblDk8?O-`tnilIh|;3lZp@F_osJUV9`*R29M?7H{Fy z`nfVEIDIWXmU&YW;NjU8)EJpXhxe5t+scf|VXM!^bBlwNh)~7|3?fWwo_~ZFk(22% zTMesYw+LNx3J-_|DM~`v93yXe=jPD{q;li;5PD?Dyk+b? zo21|XpT@)$BM$%F=P9J19Vi&1#{jM3!^Y&fr&_`toi`XB1!n>sbL%U9I5<7!@?t)~ z;&H%z>bAaQ4f$wIzkjH70;<8tpUoxzKrPhn#IQfS%9l5=Iu))^XC<58D!-O z{B+o5R^Z21H0T9JQ5gNJnqh#qH^na|z92=hONIM~@_iuOi|F>jBh-?aA20}Qx~EpDGElELNn~|7WRXRFnw+Wdo`|# zBpU=Cz3z%cUJ0mx_1($X<40XEIYz(`noWeO+x#yb_pwj6)R(__%@_Cf>txOQ74wSJ z0#F3(zWWaR-jMEY$7C*3HJrohc79>MCUu26mfYN)f4M~4gD`}EX4e}A!U}QV8!S47 z6y-U-%+h`1n`*pQuKE%Av0@)+wBZr9mH}@vH@i{v(m-6QK7Ncf17x_D=)32`FOjjo zg|^VPf5c6-!FxN{25dvVh#fog=NNpXz zfB$o+0jbRkHH{!TKhE709f+jI^$3#v1Nmf80w`@7-5$1Iv_`)W^px8P-({xwb;D0y z7LKDAHgX<84?l!I*Dvi2#D@oAE^J|g$3!)x1Ua;_;<@#l1fD}lqU2_tS^6Ht$1Wl} zBESo7o^)9-Tjuz$8YQSGhfs{BQV6zW7dA?0b(Dbt=UnQs&4zHfe_sj{RJ4uS-vQpC zX;Bbsuju4%!o8?&m4UZU@~ZZjeFF6ex2ss5_60_JS_|iNc+R0GIjH1@Z z=rLT9%B|WWgOrR7IiIwr2=T;Ne?30M!@{%Qf8o`!>=s<2CBpCK_TWc(DX51>e^xh8 z&@$^b6CgOd7KXQV&Y4%}_#uN*mbanXq(2=Nj`L7H7*k(6F8s6{FOw@(DzU`4-*77{ zF+dxpv}%mFpYK?>N_2*#Y?oB*qEKB}VoQ@bzm>ptmVS_EC(#}Lxxx730trt0G)#$b zE=wVvtqOct1%*9}U{q<)2?{+0TzZzP0jgf9*)arV)*e!f`|jgT{7_9iS@e)recI#z zbzolURQ+TOzE!ymqvBY7+5NnAbWxvMLsLTwEbFqW=CPyCsmJ}P1^V30|D5E|p3BC5 z)3|qgw@ra7aXb-wsa|l^in~1_fm{7bS9jhVRkYVO#U{qMp z)Wce+|DJ}4<2gp8r0_xfZpMo#{Hl2MfjLcZdRB9(B(A(f;+4s*FxV{1F|4d`*sRNd zp4#@sEY|?^FIJ;tmH{@keZ$P(sLh5IdOk@k^0uB^BWr@pk6mHy$qf&~rI>P*a;h0C{%oA*i!VjWn&D~O#MxN&f@1Po# zKN+ zrGrkSjcr?^R#nGl<#Q722^wbYcgW@{+6CBS<1@%dPA8HC!~a`jTz<`g_l5N1M@9wn9GOAZ>nqNgq!yOCbZ@1z`U_N`Z>}+1HIZxk*5RDc&rd5{3qjRh8QmT$VyS;jK z;AF+r6XnnCp=wQYoG|rT2@8&IvKq*IB_WvS%nt%e{MCFm`&W*#LXc|HrD?nVBo=(8*=Aq?u$sDA_sC_RPDUiQ+wnIJET8vx$&fxkW~kP9qXKt 
zozR)@xGC!P)CTkjeWvXW5&@2?)qt)jiYWWBU?AUtzAN}{JE1I)dfz~7$;}~BmQF`k zpn11qmObXwRB8&rnEG*#4Xax3XBkKlw(;tb?Np^i+H8m(Wyz9k{~ogba@laiEk;2! zV*QV^6g6(QG%vX5Um#^sT&_e`B1pBW5yVth~xUs#0}nv?~C#l?W+9Lsb_5)!71rirGvY zTIJ$OPOY516Y|_014sNv+Z8cc5t_V=i>lWV=vNu#!58y9Zl&GsMEW#pPYPYGHQ|;vFvd*9eM==$_=vc7xnyz0~ zY}r??$<`wAO?JQk@?RGvkWVJlq2dk9vB(yV^vm{=NVI8dhsX<)O(#nr9YD?I?(VmQ z^r7VfUBn<~p3()8yOBjm$#KWx!5hRW)5Jl7wY@ky9lNM^jaT##8QGVsYeaVywmpv>X|Xj7gWE1Ezai&wVLt3p)k4w~yrskT-!PR!kiyQlaxl(( zXhF%Q9x}1TMt3~u@|#wWm-Vq?ZerK={8@~&@9r5JW}r#45#rWii};t`{5#&3$W)|@ zbAf2yDNe0q}NEUvq_Quq3cTjcw z@H_;$hu&xllCI9CFDLuScEMg|x{S7GdV8<&Mq=ezDnRZAyX-8gv97YTm0bg=d)(>N z+B2FcqvI9>jGtnK%eO%y zoBPkJTk%y`8TLf4)IXPBn`U|9>O~WL2C~C$z~9|0m*YH<-vg2CD^SX#&)B4ngOSG$ zV^wmy_iQk>dfN@Pv(ckfy&#ak@MLC7&Q6Ro#!ezM*VEh`+b3Jt%m(^T&p&WJ2Oqvj zs-4nq0TW6cv~(YI$n0UkfwN}kg3_fp?(ijSV#tR9L0}l2qjc7W?i*q01=St0eZ=4h zyGQbEw`9OEH>NMuIe)hVwYHsGERWOD;JxEiO7cQv%pFCeR+IyhwQ|y@&^24k+|8fD zLiOWFNJ2&vu2&`Jv96_z-Cd5RLgmeY3*4rDOQo?Jm`;I_(+ejsPM03!ly!*Cu}Cco zrQSrEDHNyzT(D5s1rZq!8#?f6@v6dB7a-aWs(Qk>N?UGAo{gytlh$%_IhyL7h?DLXDGx zgxGEBQoCAWo-$LRvM=F5MTle`M})t3vVv;2j0HZY&G z22^iGhV@uaJh(XyyY%} zd4iH_UfdV#T=3n}(Lj^|n;O4|$;xhu*8T3hR1mc_A}fK}jfZ7LX~*n5+`8N2q#rI$ z@<_2VANlYF$vIH$ zl<)+*tIWW78IIINA7Rr7i{<;#^yzxoLNkXL)eSs=%|P>$YQIh+ea_3k z_s7r4%j7%&*NHSl?R4k%1>Z=M9o#zxY!n8sL5>BO-ZP;T3Gut>iLS@U%IBrX6BA3k z)&@q}V8a{X<5B}K5s(c(LQ=%v1ocr`t$EqqY0EqVjr65usa=0bkf|O#ky{j3)WBR(((L^wmyHRzoWuL2~WTC=`yZ zn%VX`L=|Ok0v7?s>IHg?yArBcync5rG#^+u)>a%qjES%dRZoIyA8gQ;StH z1Ao7{<&}6U=5}4v<)1T7t!J_CL%U}CKNs-0xWoTTeqj{5{?Be$L0_tk>M9o8 zo371}S#30rKZFM{`H_(L`EM9DGp+Mifk&IP|C2Zu_)Ghr4Qtpmkm1osCf@%Z$%t+7 zYH$Cr)Ro@3-QDeQJ8m+x6%;?YYT;k6Z0E-?kr>x33`H%*ueBD7Zx~3&HtWn0?2Wt} zTG}*|v?{$ajzt}xPzV%lL1t-URi8*Zn)YljXNGDb>;!905Td|mpa@mHjIH%VIiGx- zd@MqhpYFu4_?y5N4xiHn3vX&|e6r~Xt> zZG`aGq|yTNjv;9E+Txuoa@A(9V7g?1_T5FzRI;!=NP1Kqou1z5?%X~Wwb{trRfd>i z8&y^H)8YnKyA_Fyx>}RNmQIczT?w2J4SNvI{5J&}Wto|8FR(W;Qw#b1G<1%#tmYzQ zQ2mZA-PAdi%RQOhkHy9Ea#TPSw?WxwL@H@cbkZwIq0B!@ns}niALidmn&W?!Vd4Gj 
zO7FiuV4*6Mr^2xlFSvM;Cp_#r8UaqIzHJQg_z^rEJw&OMm_8NGAY2)rKvki|o1bH~ z$2IbfVeY2L(^*rMRU1lM5Y_sgrDS`Z??nR2lX;zyR=c%UyGb*%TC-Dil?SihkjrQy~TMv6;BMs7P8il`H7DmpVm@rJ;b)hW)BL)GjS154b*xq-NXq2cwE z^;VP7ua2pxvCmxrnqUYQMH%a%nHmwmI33nJM(>4LznvY*k&C0{8f*%?zggpDgkuz&JBx{9mfb@wegEl2v!=}Sq2Gaty0<)UrOT0{MZtZ~j5y&w zXlYa_jY)I_+VA-^#mEox#+G>UgvM!Ac8zI<%JRXM_73Q!#i3O|)lOP*qBeJG#BST0 zqohi)O!|$|2SeJQo(w6w7%*92S})XfnhrH_Z8qe!G5>CglP=nI7JAOW?(Z29;pXJ9 zR9`KzQ=WEhy*)WH>$;7Cdz|>*i>=##0bB)oU0OR>>N<21e4rMCHDemNi2LD>Nc$;& zQRFthpWniC1J6@Zh~iJCoLOxN`oCKD5Q4r%ynwgUKPlIEd#?QViIqovY|czyK8>6B zSP%{2-<;%;1`#0mG^B(8KbtXF;Nf>K#Di72UWE4gQ%(_26Koiad)q$xRL~?pN71ZZ zujaaCx~jXjygw;rI!WB=xrOJO6HJ!!w}7eiivtCg5K|F6$EXa)=xUC za^JXSX98W`7g-tm@uo|BKj39Dl;sg5ta;4qjo^pCh~{-HdLl6qI9Ix6f$+qiZ$}s= zNguKrU;u+T@ko(Vr1>)Q%h$?UKXCY>3se%&;h2osl2D zE4A9bd7_|^njDd)6cI*FupHpE3){4NQ*$k*cOWZ_?CZ>Z4_fl@n(mMnYK62Q1d@+I zr&O))G4hMihgBqRIAJkLdk(p(D~X{-oBUA+If@B}j& zsHbeJ3RzTq96lB7d($h$xTeZ^gP0c{t!Y0c)aQE;$FY2!mACg!GDEMKXFOPI^)nHZ z`aSPJpvV0|bbrzhWWkuPURlDeN%VT8tndV8?d)eN*i4I@u zVKl^6{?}A?P)Fsy?3oi#clf}L18t;TjNI2>eI&(ezDK7RyqFxcv%>?oxUlonv(px) z$vnPzRH`y5A(x!yOIfL0bmgeMQB$H5wenx~!ujQK*nUBW;@Em&6Xv2%s(~H5WcU2R z;%Nw<$tI)a`Ve!>x+qegJnQsN2N7HaKzrFqM>`6R*gvh%O*-%THt zrB$Nk;lE;z{s{r^PPm5qz(&lM{sO*g+W{sK+m3M_z=4=&CC>T`{X}1Vg2PEfSj2x_ zmT*(x;ov%3F?qoEeeM>dUn$a*?SIGyO8m806J1W1o+4HRhc2`9$s6hM#qAm zChQ87b~GEw{ADfs+5}FJ8+|bIlIv(jT$Ap#hSHoXdd9#w<#cA<1Rkq^*EEkknUd4& zoIWIY)sAswy6fSERVm&!SO~#iN$OgOX*{9@_BWFyJTvC%S++ilSfCrO(?u=Dc?CXZ zzCG&0yVR{Z`|ZF0eEApWEo#s9osV>F{uK{QA@BES#&;#KsScf>y zvs?vIbI>VrT<*!;XmQS=bhq%46-aambZ(8KU-wOO2=en~D}MCToB_u;Yz{)1ySrPZ z@=$}EvjTdzTWU7c0ZI6L8=yP+YRD_eMMos}b5vY^S*~VZysrkq<`cK3>>v%uy7jgq z0ilW9KjVDHLv0b<1K_`1IkbTOINs0=m-22c%M~l=^S}%hbli-3?BnNq?b`hx^HX2J zIe6ECljRL0uBWb`%{EA=%!i^4sMcj+U_TaTZRb+~GOk z^ZW!nky0n*Wb*r+Q|9H@ml@Z5gU&W`(z4-j!OzC1wOke`TRAYGZVl$PmQ16{3196( zO*?`--I}Qf(2HIwb2&1FB^!faPA2=sLg(@6P4mN)>Dc3i(B0;@O-y2;lM4akD>@^v z=u>*|!s&9zem70g7zfw9FXl1bpJW(C#5w#uy5!V?Q(U35A~$dR%LDVnq@}kQm13{} 
zd53q3N(s$Eu{R}k2esbftfjfOITCL;jWa$}(mmm}d(&7JZ6d3%IABCapFFYjdEjdK z&4Edqf$G^MNAtL=uCDRs&Fu@FXRgX{*0<(@c3|PNHa>L%zvxWS={L8%qw`STm+=Rd zA}FLspESSIpE_^41~#5yI2bJ=9`oc;GIL!JuW&7YetZ?0H}$$%8rW@*J37L-~Rsx!)8($nI4 zZhcZ2^=Y+p4YPl%j!nFJA|*M^gc(0o$i3nlphe+~-_m}jVkRN{spFs(o0ajW@f3K{ zDV!#BwL322CET$}Y}^0ixYj2w>&Xh12|R8&yEw|wLDvF!lZ#dOTHM9pK6@Nm-@9Lnng4ZHBgBSrr7KI8YCC9DX5Kg|`HsiwJHg2(7#nS;A{b3tVO?Z% za{m5b3rFV6EpX;=;n#wltDv1LE*|g5pQ+OY&*6qCJZc5oDS6Z6JD#6F)bWxZSF@q% z+1WV;m!lRB!n^PC>RgQCI#D1br_o^#iPk>;K2hB~0^<~)?p}LG%kigm@moD#q3PE+ zA^Qca)(xnqw6x>XFhV6ku9r$E>bWNrVH9fum0?4s?Rn2LG{Vm_+QJHse6xa%nzQ?k zKug4PW~#Gtb;#5+9!QBgyB@q=sk9=$S{4T>wjFICStOM?__fr+Kei1 z3j~xPqW;W@YkiUM;HngG!;>@AITg}vAE`M2Pj9Irl4w1fo4w<|Bu!%rh%a(Ai^Zhi zs92>v5;@Y(Zi#RI*ua*h`d_7;byQSa*v9E{2x$<-_=5Z<7{%)}4XExANcz@rK69T0x3%H<@frW>RA8^swA+^a(FxK| zFl3LD*ImHN=XDUkrRhp6RY5$rQ{bRgSO*(vEHYV)3Mo6Jy3puiLmU&g82p{qr0F?ohmbz)f2r{X2|T2 z$4fdQ=>0BeKbiVM!e-lIIs8wVTuC_m7}y4A_%ikI;Wm5$9j(^Y z(cD%U%k)X>_>9~t8;pGzL6L-fmQO@K; zo&vQzMlgY95;1BSkngY)e{`n0!NfVgf}2mB3t}D9@*N;FQ{HZ3Pb%BK6;5#-O|WI( zb6h@qTLU~AbVW#_6?c!?Dj65Now7*pU{h!1+eCV^KCuPAGs28~3k@ueL5+u|Z-7}t z9|lskE`4B7W8wMs@xJa{#bsCGDFoRSNSnmNYB&U7 zVGKWe%+kFB6kb)e;TyHfqtU6~fRg)f|>=5(N36)0+C z`hv65J<$B}WUc!wFAb^QtY31yNleq4dzmG`1wHTj=c*=hay9iD071Hc?oYoUk|M*_ zU1GihAMBsM@5rUJ(qS?9ZYJ6@{bNqJ`2Mr+5#hKf?doa?F|+^IR!8lq9)wS3tF_9n zW_?hm)G(M+MYb?V9YoX^_mu5h-LP^TL^!Q9Z7|@sO(rg_4+@=PdI)WL(B7`!K^ND- z-uIuVDCVEdH_C@c71YGYT^_Scf_dhB8Z2Xy6vGtBSlYud9vggOqv^L~F{BraSE_t} zIkP+Hp2&nH^-MNEs}^`oMLy11`PQW$T|K(`Bu*(f@)mv1-qY(_YG&J2M2<7k;;RK~ zL{Fqj9yCz8(S{}@c)S!65aF<=&eLI{hAMErCx&>i7OeDN>okvegO87OaG{Jmi<|}D zaT@b|0X{d@OIJ7zvT>r+eTzgLq~|Dpu)Z&db-P4z*`M$UL51lf>FLlq6rfG)%doyp z)3kk_YIM!03eQ8Vu_2fg{+osaEJPtJ-s36R+5_AEG12`NG)IQ#TF9c@$99%0iye+ zUzZ57=m2)$D(5Nx!n)=5Au&O0BBgwxIBaeI(mro$#&UGCr<;C{UjJVAbVi%|+WP(a zL$U@TYCxJ=1{Z~}rnW;7UVb7+ZnzgmrogDxhjLGo>c~MiJAWs&&;AGg@%U?Y^0JhL ze(x6Z74JG6FlOFK(T}SXQfhr}RIFl@QXKnIcXYF)5|V~e-}suHILKT-k|<*~Ij|VF zC;t@=uj=hot~*!C68G8hTA%8SzOfETOXQ|3FSaIEjvBJp(A)7SWUi5!Eu#yWgY+;n 
zlm<$+UDou*V+246_o#V4kMdto8hF%%Lki#zPh}KYXmMf?hrN0;>Mv%`@{0Qn`Ujp) z=lZe+13>^Q!9zT);H<(#bIeRWz%#*}sgUX9P|9($kexOyKIOc`dLux}c$7It4u|Rl z6SSkY*V~g_B-hMPo_ak>>z@AVQ(_N)VY2kB3IZ0G(iDUYw+2d7W^~(Jq}KY=JnWS( z#rzEa&0uNhJ>QE8iiyz;n2H|SV#Og+wEZv=f2%1ELX!SX-(d3tEj$5$1}70Mp<&eI zCkfbByL7af=qQE@5vDVxx1}FSGt_a1DoE3SDI+G)mBAna)KBG4p8Epxl9QZ4BfdAN zFnF|Y(umr;gRgG6NLQ$?ZWgllEeeq~z^ZS7L?<(~O&$5|y)Al^iMKy}&W+eMm1W z7EMU)u^ke(A1#XCV>CZ71}P}0x)4wtHO8#JRG3MA-6g=`ZM!FcICCZ{IEw8Dm2&LQ z1|r)BUG^0GzI6f946RrBlfB1Vs)~8toZf~7)+G;pv&XiUO(%5bm)pl=p>nV^o*;&T z;}@oZSibzto$arQgfkp|z4Z($P>dTXE{4O=vY0!)kDO* zGF8a4wq#VaFpLfK!iELy@?-SeRrdz%F*}hjKcA*y@mj~VD3!it9lhRhX}5YOaR9$} z3mS%$2Be7{l(+MVx3 z(4?h;P!jnRmX9J9sYN#7i=iyj_5q7n#X(!cdqI2lnr8T$IfOW<_v`eB!d9xY1P=2q&WtOXY=D9QYteP)De?S4}FK6#6Ma z=E*V+#s8>L;8aVroK^6iKo=MH{4yEZ_>N-N z`(|;aOATba1^asjxlILk<4}f~`39dBFlxj>Dw(hMYKPO3EEt1@S`1lxFNM+J@uB7T zZ8WKjz7HF1-5&2=l=fqF-*@>n5J}jIxdDwpT?oKM3s8Nr`x8JnN-kCE?~aM1H!hAE z%%w(3kHfGwMnMmNj(SU(w42OrC-euI>Dsjk&jz3ts}WHqmMpzQ3vZrsXrZ|}+MHA7 z068obeXZTsO*6RS@o3x80E4ok``rV^Y3hr&C1;|ZZ0|*EKO`$lECUYG2gVFtUTw)R z4Um<0ZzlON`zTdvVdL#KFoMFQX*a5wM0Czp%wTtfK4Sjs)P**RW&?lP$(<}q%r68Z zS53Y!d@&~ne9O)A^tNrXHhXBkj~$8j%pT1%%mypa9AW5E&s9)rjF4@O3ytH{0z6riz|@< zB~UPh*wRFg2^7EbQrHf0y?E~dHlkOxof_a?M{LqQ^C!i2dawHTPYUE=X@2(3<=OOxs8qn_(y>pU>u^}3y&df{JarR0@VJn0f+U%UiF=$Wyq zQvnVHESil@d|8&R<%}uidGh7@u^(%?$#|&J$pvFC-n8&A>utA=n3#)yMkz+qnG3wd zP7xCnF|$9Dif@N~L)Vde3hW8W!UY0BgT2v(wzp;tlLmyk2%N|0jfG$%<;A&IVrOI< z!L)o>j>;dFaqA3pL}b-Je(bB@VJ4%!JeX@3x!i{yIeIso^=n?fDX`3bU=eG7sTc%g%ye8$v8P@yKE^XD=NYxTb zbf!Mk=h|otpqjFaA-vs5YOF-*GwWPc7VbaOW&stlANnCN8iftFMMrUdYNJ_Bnn5Vt zxfz@Ah|+4&P;reZxp;MmEI7C|FOv8NKUm8njF7Wb6Gi7DeODLl&G~}G4be&*Hi0Qw z5}77vL0P+7-B%UL@3n1&JPxW^d@vVwp?u#gVcJqY9#@-3X{ok#UfW3<1fb%FT`|)V~ggq z(3AUoUS-;7)^hCjdT0Kf{i}h)mBg4qhtHHBti=~h^n^OTH5U*XMgDLIR@sre`AaB$ zg)IGBET_4??m@cx&c~bA80O7B8CHR7(LX7%HThkeC*@vi{-pL%e)yXp!B2InafbDF zjPXf1mko3h59{lT6EEbxKO1Z5GF71)WwowO6kY|6tjSVSWdQ}NsK2x{>i|MKZK8%Q 
zfu&_0D;CO-Jg0#YmyfctyJ!mRJp)e#@O0mYdp|8x;G1%OZQ3Q847YWTyy|%^cpA;m zze0(5p{tMu^lDkpe?HynyO?a1$_LJl2L&mpeKu%8YvgRNr=%2z${%WThHG=vrWY@4 zsA`OP#O&)TetZ>s%h!=+CE15lOOls&nvC~$Qz0Ph7tHiP;O$i|eDwpT{cp>+)0-|; zY$|bB+Gbel>5aRN3>c0x)4U=|X+z+{ zn*_p*EQoquRL+=+p;=lm`d71&1NqBz&_ph)MXu(Nv6&XE7(RsS)^MGj5Q?Fwude-(sq zjJ>aOq!7!EN>@(fK7EE#;i_BGvli`5U;r!YA{JRodLBc6-`n8K+Fjgwb%sX;j=qHQ z7&Tr!)!{HXoO<2BQrV9Sw?JRaLXV8HrsNevvnf>Y-6|{T!pYLl7jp$-nEE z#X!4G4L#K0qG_4Z;Cj6=;b|Be$hi4JvMH!-voxqx^@8cXp`B??eFBz2lLD8RRaRGh zn7kUfy!YV~p(R|p7iC1Rdgt$_24i0cd-S8HpG|`@my70g^y`gu%#Tf_L21-k?sRRZHK&at(*ED0P8iw{7?R$9~OF$Ko;Iu5)ur5<->x!m93Eb zFYpIx60s=Wxxw=`$aS-O&dCO_9?b1yKiPCQmSQb>T)963`*U+Ydj5kI(B(B?HNP8r z*bfSBpSu)w(Z3j7HQoRjUG(+d=IaE~tv}y14zHHs|0UcN52fT8V_<@2ep_ee{QgZG zmgp8iv4V{k;~8@I%M3<#B;2R>Ef(Gg_cQM7%}0s*^)SK6!Ym+~P^58*wnwV1BW@eG z4sZLqsUvBbFsr#8u7S1r4teQ;t)Y@jnn_m5jS$CsW1um!p&PqAcc8!zyiXHVta9QC zY~wCwCF0U%xiQPD_INKtTb;A|Zf29(mu9NI;E zc-e>*1%(LSXB`g}kd`#}O;veb<(sk~RWL|f3ljxCnEZDdNSTDV6#Td({6l&y4IjKF z^}lIUq*ZUqgTPumD)RrCN{M^jhY>E~1pn|KOZ5((%F)G|*ZQ|r4zIbrEiV%42hJV8 z3xS)=!X1+=olbdGJ=yZil?oXLct8FM{(6ikLL3E%=q#O6(H$p~gQu6T8N!plf!96| z&Q3=`L~>U0zZh;z(pGR2^S^{#PrPxTRHD1RQOON&f)Siaf`GLj#UOk&(|@0?zm;Sx ztsGt8=29-MZs5CSf1l1jNFtNt5rFNZxJPvkNu~2}7*9468TWm>nN9TP&^!;J{-h)_ z7WsHH9|F%I`Pb!>KAS3jQWKfGivTVkMJLO-HUGM_a4UQ_%RgL6WZvrW+Z4ujZn;y@ zz9$=oO!7qVTaQAA^BhX&ZxS*|5dj803M=k&2%QrXda`-Q#IoZL6E(g+tN!6CA!CP* zCpWtCujIea)ENl0liwVfj)Nc<9mV%+e@=d`haoZ*`B7+PNjEbXBkv=B+Pi^~L#EO$D$ZqTiD8f<5$eyb54-(=3 zh)6i8i|jp(@OnRrY5B8t|LFXFQVQ895n*P16cEKTrT*~yLH6Z4e*bZ5otpRDri&+A zfNbK1D5@O=sm`fN=WzWyse!za5n%^+6dHPGX#8DyIK>?9qyX}2XvBWVqbP%%D)7$= z=#$WulZlZR<{m#gU7lwqK4WS1Ne$#_P{b17qe$~UOXCl>5b|6WVh;5vVnR<%d+Lnp z$uEmML38}U4vaW8>shm6CzB(Wei3s#NAWE3)a2)z@i{4jTn;;aQS)O@l{rUM`J@K& l00vQ5JBs~;vo!vr%%-k{2_Fq1Mn4QF81S)AQ99zk{{c4yR+0b! 
literal 63721 zcmb5Wb9gP!wgnp7wrv|bwr$&XvSZt}Z6`anZSUAlc9NHKf9JdJ;NJVr`=eI(_pMp0 zy1VAAG3FfAOI`{X1O)&90s;U4K;XLp008~hCjbEC_fbYfS%6kTR+JtXK>nW$ZR+`W ze|#J8f4A@M|F5BpfUJb5h>|j$jOe}0oE!`Zf6fM>CR?!y@zU(cL8NsKk`a z6tx5mAkdjD;J=LcJ;;Aw8p!v#ouk>mUDZF@ zK>yvw%+bKu+T{Nk@LZ;zkYy0HBKw06_IWcMHo*0HKpTsEFZhn5qCHH9j z)|XpN&{`!0a>Vl+PmdQc)Yg4A(AG-z!+@Q#eHr&g<9D?7E)_aEB?s_rx>UE9TUq|? z;(ggJt>9l?C|zoO@5)tu?EV0x_7T17q4fF-q3{yZ^ipUbKcRZ4Qftd!xO(#UGhb2y>?*@{xq%`(-`2T^vc=#< zx!+@4pRdk&*1ht2OWk^Z5IAQ0YTAXLkL{(D*$gENaD)7A%^XXrCchN&z2x+*>o2FwPFjWpeaL=!tzv#JOW#( z$B)Nel<+$bkH1KZv3&-}=SiG~w2sbDbAWarg%5>YbC|}*d9hBjBkR(@tyM0T)FO$# zPtRXukGPnOd)~z=?avu+4Co@wF}1T)-uh5jI<1$HLtyDrVak{gw`mcH@Q-@wg{v^c zRzu}hMKFHV<8w}o*yg6p@Sq%=gkd~;`_VGTS?L@yVu`xuGy+dH6YOwcP6ZE`_0rK% zAx5!FjDuss`FQ3eF|mhrWkjux(Pny^k$u_)dyCSEbAsecHsq#8B3n3kDU(zW5yE|( zgc>sFQywFj5}U*qtF9Y(bi*;>B7WJykcAXF86@)z|0-Vm@jt!EPoLA6>r)?@DIobIZ5Sx zsc@OC{b|3%vaMbyeM|O^UxEYlEMHK4r)V-{r)_yz`w1*xV0|lh-LQOP`OP`Pk1aW( z8DSlGN>Ts|n*xj+%If~+E_BxK)~5T#w6Q1WEKt{!Xtbd`J;`2a>8boRo;7u2M&iOop4qcy<)z023=oghSFV zST;?S;ye+dRQe>ygiJ6HCv4;~3DHtJ({fWeE~$H@mKn@Oh6Z(_sO>01JwH5oA4nvK zr5Sr^g+LC zLt(i&ecdmqsIJGNOSUyUpglvhhrY8lGkzO=0USEKNL%8zHshS>Qziu|`eyWP^5xL4 zRP122_dCJl>hZc~?58w~>`P_s18VoU|7(|Eit0-lZRgLTZKNq5{k zE?V=`7=R&ro(X%LTS*f+#H-mGo_j3dm@F_krAYegDLk6UV{`UKE;{YSsn$ z(yz{v1@p|p!0>g04!eRSrSVb>MQYPr8_MA|MpoGzqyd*$@4j|)cD_%^Hrd>SorF>@ zBX+V<@vEB5PRLGR(uP9&U&5=(HVc?6B58NJT_igiAH*q~Wb`dDZpJSKfy5#Aag4IX zj~uv74EQ_Q_1qaXWI!7Vf@ZrdUhZFE;L&P_Xr8l@GMkhc#=plV0+g(ki>+7fO%?Jb zl+bTy7q{w^pTb{>(Xf2q1BVdq?#f=!geqssXp z4pMu*q;iiHmA*IjOj4`4S&|8@gSw*^{|PT}Aw~}ZXU`6=vZB=GGeMm}V6W46|pU&58~P+?LUs%n@J}CSrICkeng6YJ^M? 
zS(W?K4nOtoBe4tvBXs@@`i?4G$S2W&;$z8VBSM;Mn9 zxcaEiQ9=vS|bIJ>*tf9AH~m&U%2+Dim<)E=}KORp+cZ^!@wI`h1NVBXu{@%hB2Cq(dXx_aQ9x3mr*fwL5!ZryQqi|KFJuzvP zK1)nrKZ7U+B{1ZmJub?4)Ln^J6k!i0t~VO#=q1{?T)%OV?MN}k5M{}vjyZu#M0_*u z8jwZKJ#Df~1jcLXZL7bnCEhB6IzQZ-GcoQJ!16I*39iazoVGugcKA{lhiHg4Ta2fD zk1Utyc5%QzZ$s3;p0N+N8VX{sd!~l*Ta3|t>lhI&G`sr6L~G5Lul`>m z{!^INm?J|&7X=;{XveF!(b*=?9NAp4y&r&N3(GKcW4rS(Ejk|Lzs1PrxPI_owB-`H zg3(Rruh^&)`TKA6+_!n>RdI6pw>Vt1_j&+bKIaMTYLiqhZ#y_=J8`TK{Jd<7l9&sY z^^`hmi7^14s16B6)1O;vJWOF$=$B5ONW;;2&|pUvJlmeUS&F;DbSHCrEb0QBDR|my zIs+pE0Y^`qJTyH-_mP=)Y+u^LHcuZhsM3+P||?+W#V!_6E-8boP#R-*na4!o-Q1 zVthtYhK{mDhF(&7Okzo9dTi03X(AE{8cH$JIg%MEQca`S zy@8{Fjft~~BdzWC(di#X{ny;!yYGK9b@=b|zcKZ{vv4D8i+`ilOPl;PJl{!&5-0!w z^fOl#|}vVg%=n)@_e1BrP)`A zKPgs`O0EO}Y2KWLuo`iGaKu1k#YR6BMySxQf2V++Wo{6EHmK>A~Q5o73yM z-RbxC7Qdh0Cz!nG+7BRZE>~FLI-?&W_rJUl-8FDIaXoNBL)@1hwKa^wOr1($*5h~T zF;%f^%<$p8Y_yu(JEg=c_O!aZ#)Gjh$n(hfJAp$C2he555W5zdrBqjFmo|VY+el;o z=*D_w|GXG|p0**hQ7~9-n|y5k%B}TAF0iarDM!q-jYbR^us(>&y;n^2l0C%@2B}KM zyeRT9)oMt97Agvc4sEKUEy%MpXr2vz*lb zh*L}}iG>-pqDRw7ud{=FvTD?}xjD)w{`KzjNom-$jS^;iw0+7nXSnt1R@G|VqoRhE%12nm+PH?9`(4rM0kfrZzIK9JU=^$YNyLvAIoxl#Q)xxDz!^0@zZ zSCs$nfcxK_vRYM34O<1}QHZ|hp4`ioX3x8(UV(FU$J@o%tw3t4k1QPmlEpZa2IujG&(roX_q*%e`Hq|);0;@k z0z=fZiFckp#JzW0p+2A+D$PC~IsakhJJkG(c;CqAgFfU0Z`u$PzG~-9I1oPHrCw&)@s^Dc~^)#HPW0Ra}J^=|h7Fs*<8|b13ZzG6MP*Q1dkoZ6&A^!}|hbjM{2HpqlSXv_UUg1U4gn z3Q)2VjU^ti1myodv+tjhSZp%D978m~p& z43uZUrraHs80Mq&vcetqfQpQP?m!CFj)44t8Z}k`E798wxg&~aCm+DBoI+nKq}&j^ zlPY3W$)K;KtEajks1`G?-@me7C>{PiiBu+41#yU_c(dITaqE?IQ(DBu+c^Ux!>pCj zLC|HJGU*v+!it1(;3e`6igkH(VA)-S+k(*yqxMgUah3$@C zz`7hEM47xr>j8^g`%*f=6S5n>z%Bt_Fg{Tvmr+MIsCx=0gsu_sF`q2hlkEmisz#Fy zj_0;zUWr;Gz}$BS%Y`meb(=$d%@Crs(OoJ|}m#<7=-A~PQbyN$x%2iXP2@e*nO0b7AwfH8cCUa*Wfu@b)D_>I*%uE4O3 z(lfnB`-Xf*LfC)E}e?%X2kK7DItK6Tf<+M^mX0Ijf_!IP>7c8IZX%8_#0060P{QMuV^B9i<^E`_Qf0pv9(P%_s8D`qvDE9LK9u-jB}J2S`(mCO&XHTS04Z5Ez*vl^T%!^$~EH8M-UdwhegL>3IQ*)(MtuH2Xt1p!fS4o~*rR?WLxlA!sjc2(O 
znjJn~wQ!Fp9s2e^IWP1C<4%sFF}T4omr}7+4asciyo3DntTgWIzhQpQirM$9{EbQd z3jz9vS@{aOqTQHI|l#aUV@2Q^Wko4T0T04Me4!2nsdrA8QY1%fnAYb~d2GDz@lAtfcHq(P7 zaMBAGo}+NcE-K*@9y;Vt3*(aCaMKXBB*BJcD_Qnxpt75r?GeAQ}*|>pYJE=uZb73 zC>sv)18)q#EGrTG6io*}JLuB_jP3AU1Uiu$D7r|2_zlIGb9 zjhst#ni)Y`$)!fc#reM*$~iaYoz~_Cy7J3ZTiPm)E?%`fbk`3Tu-F#`{i!l5pNEn5 zO-Tw-=TojYhzT{J=?SZj=Z8#|eoF>434b-DXiUsignxXNaR3 zm_}4iWU$gt2Mw5NvZ5(VpF`?X*f2UZDs1TEa1oZCif?Jdgr{>O~7}-$|BZ7I(IKW`{f;@|IZFX*R8&iT= zoWstN8&R;}@2Ka%d3vrLtR|O??ben;k8QbS-WB0VgiCz;<$pBmIZdN!aalyCSEm)crpS9dcD^Y@XT1a3+zpi-`D}e#HV<} z$Y(G&o~PvL-xSVD5D?JqF3?B9rxGWeb=oEGJ3vRp5xfBPlngh1O$yI95EL+T8{GC@ z98i1H9KhZGFl|;`)_=QpM6H?eDPpw~^(aFQWwyXZ8_EEE4#@QeT_URray*mEOGsGc z6|sdXtq!hVZo=d#+9^@lm&L5|q&-GDCyUx#YQiccq;spOBe3V+VKdjJA=IL=Zn%P} zNk=_8u}VhzFf{UYZV0`lUwcD&)9AFx0@Fc6LD9A6Rd1=ga>Mi0)_QxM2ddCVRmZ0d z+J=uXc(?5JLX3=)e)Jm$HS2yF`44IKhwRnm2*669_J=2LlwuF5$1tAo@ROSU@-y+;Foy2IEl2^V1N;fk~YR z?&EP8#t&m0B=?aJeuz~lHjAzRBX>&x=A;gIvb>MD{XEV zV%l-+9N-)i;YH%nKP?>f`=?#`>B(`*t`aiPLoQM(a6(qs4p5KFjDBN?8JGrf3z8>= zi7sD)c)Nm~x{e<^jy4nTx${P~cwz_*a>%0_;ULou3kHCAD7EYkw@l$8TN#LO9jC( z1BeFW`k+bu5e8Ns^a8dPcjEVHM;r6UX+cN=Uy7HU)j-myRU0wHd$A1fNI~`4;I~`zC)3ul#8#^rXVSO*m}Ag>c%_;nj=Nv$rCZ z*~L@C@OZg%Q^m)lc-kcX&a*a5`y&DaRxh6O*dfhLfF+fU5wKs(1v*!TkZidw*)YBP za@r`3+^IHRFeO%!ai%rxy;R;;V^Fr=OJlpBX;(b*3+SIw}7= zIq$*Thr(Zft-RlY)D3e8V;BmD&HOfX+E$H#Y@B3?UL5L~_fA-@*IB-!gItK7PIgG9 zgWuGZK_nuZjHVT_Fv(XxtU%)58;W39vzTI2n&)&4Dmq7&JX6G>XFaAR{7_3QB6zsT z?$L8c*WdN~nZGiscY%5KljQARN;`w$gho=p006z;n(qIQ*Zu<``TMO3n0{ARL@gYh zoRwS*|Niw~cR!?hE{m*y@F`1)vx-JRfqET=dJ5_(076st(=lFfjtKHoYg`k3oNmo_ zNbQEw8&sO5jAYmkD|Zaz_yUb0rC})U!rCHOl}JhbYIDLzLvrZVw0~JO`d*6f;X&?V=#T@ND*cv^I;`sFeq4 z##H5;gpZTb^0Hz@3C*~u0AqqNZ-r%rN3KD~%Gw`0XsIq$(^MEb<~H(2*5G^<2(*aI z%7}WB+TRlMIrEK#s0 z93xn*Ohb=kWFc)BNHG4I(~RPn-R8#0lqyBBz5OM6o5|>x9LK@%HaM}}Y5goCQRt2C z{j*2TtT4ne!Z}vh89mjwiSXG=%DURar~=kGNNaO_+Nkb+tRi~Rkf!7a$*QlavziD( z83s4GmQ^Wf*0Bd04f#0HX@ua_d8 z23~z*53ePD6@xwZ(vdl0DLc=>cPIOPOdca&MyR^jhhKrdQO?_jJh`xV3GKz&2lvP8 
zEOwW6L*ufvK;TN{=S&R@pzV^U=QNk^Ec}5H z+2~JvEVA{`uMAr)?Kf|aW>33`)UL@bnfIUQc~L;TsTQ6>r-<^rB8uoNOJ>HWgqMI8 zSW}pZmp_;z_2O5_RD|fGyTxaxk53Hg_3Khc<8AUzV|ZeK{fp|Ne933=1&_^Dbv5^u zB9n=*)k*tjHDRJ@$bp9mrh}qFn*s}npMl5BMDC%Hs0M0g-hW~P*3CNG06G!MOPEQ_ zi}Qs-6M8aMt;sL$vlmVBR^+Ry<64jrm1EI1%#j?c?4b*7>)a{aDw#TfTYKq+SjEFA z(aJ&z_0?0JB83D-i3Vh+o|XV4UP+YJ$9Boid2^M2en@APw&wx7vU~t$r2V`F|7Qfo z>WKgI@eNBZ-+Og<{u2ZiG%>YvH2L3fNpV9J;WLJoBZda)01Rn;o@){01{7E#ke(7U zHK>S#qZ(N=aoae*4X!0A{)nu0R_sKpi1{)u>GVjC+b5Jyl6#AoQ-1_3UDovNSo`T> z?c-@7XX*2GMy?k?{g)7?Sv;SJkmxYPJPs!&QqB12ejq`Lee^-cDveVWL^CTUldb(G zjDGe(O4P=S{4fF=#~oAu>LG>wrU^z_?3yt24FOx>}{^lCGh8?vtvY$^hbZ)9I0E3r3NOlb9I?F-Yc=r$*~l`4N^xzlV~N zl~#oc>U)Yjl0BxV>O*Kr@lKT{Z09OXt2GlvE38nfs+DD7exl|&vT;)>VFXJVZp9Np zDK}aO;R3~ag$X*|hRVY3OPax|PG`@_ESc8E!mHRByJbZQRS38V2F__7MW~sgh!a>98Q2%lUNFO=^xU52|?D=IK#QjwBky-C>zOWlsiiM&1n z;!&1((Xn1$9K}xabq~222gYvx3hnZPg}VMF_GV~5ocE=-v>V=T&RsLBo&`)DOyIj* zLV{h)JU_y*7SdRtDajP_Y+rBkNN*1_TXiKwHH2&p51d(#zv~s#HwbNy?<+(=9WBvo zw2hkk2Dj%kTFhY+$T+W-b7@qD!bkfN#Z2ng@Pd=i3-i?xYfs5Z*1hO?kd7Sp^9`;Y zM2jeGg<-nJD1er@Pc_cSY7wo5dzQX44=%6rn}P_SRbpzsA{6B+!$3B0#;}qwO37G^ zL(V_5JK`XT?OHVk|{_$vQ|oNEpab*BO4F zUTNQ7RUhnRsU`TK#~`)$icsvKh~(pl=3p6m98@k3P#~upd=k*u20SNcb{l^1rUa)>qO997)pYRWMncC8A&&MHlbW?7i^7M`+B$hH~Y|J zd>FYOGQ;j>Zc2e7R{KK7)0>>nn_jYJy&o@sK!4G>-rLKM8Hv)f;hi1D2fAc$+six2 zyVZ@wZ6x|fJ!4KrpCJY=!Mq0;)X)OoS~{Lkh6u8J`eK%u0WtKh6B>GW_)PVc zl}-k`p09qwGtZ@VbYJC!>29V?Dr>>vk?)o(x?!z*9DJ||9qG-&G~#kXxbw{KKYy}J zQKa-dPt~M~E}V?PhW0R26xdA%1T*%ra6SguGu50YHngOTIv)@N|YttEXo#OZfgtP7;H?EeZZxo<}3YlYxtBq znJ!WFR^tmGf0Py}N?kZ(#=VtpC@%xJkDmfcCoBTxq zr_|5gP?u1@vJZbxPZ|G0AW4=tpb84gM2DpJU||(b8kMOV1S3|(yuwZJ&rIiFW(U;5 zUtAW`O6F6Zy+eZ1EDuP~AAHlSY-+A_eI5Gx)%*uro5tljy}kCZU*_d7)oJ>oQSZ3* zneTn`{gnNC&uJd)0aMBzAg021?YJ~b(fmkwZAd696a=0NzBAqBN54KuNDwa*no(^O z6p05bioXUR^uXjpTol*ppHp%1v9e)vkoUAUJyBx3lw0UO39b0?^{}yb!$yca(@DUn zCquRF?t=Zb9`Ed3AI6|L{eX~ijVH`VzSMheKoP7LSSf4g>md>`yi!TkoG5P>Ofp+n z(v~rW+(5L96L{vBb^g51B=(o)?%%xhvT*A5btOpw(TKh^g^4c 
zw>0%X!_0`{iN%RbVk+A^f{w-4-SSf*fu@FhruNL##F~sF24O~u zyYF<3el2b$$wZ_|uW#@Ak+VAGk#e|kS8nL1g>2B-SNMjMp^8;-FfeofY2fphFHO!{ z*!o4oTb{4e;S<|JEs<1_hPsmAlVNk?_5-Fp5KKU&d#FiNW~Y+pVFk@Cua1I{T+1|+ zHx6rFMor)7L)krbilqsWwy@T+g3DiH5MyVf8Wy}XbEaoFIDr~y;@r&I>FMW{ z?Q+(IgyebZ)-i4jNoXQhq4Muy9Fv+OxU;9_Jmn+<`mEC#%2Q_2bpcgzcinygNI!&^ z=V$)o2&Yz04~+&pPWWn`rrWxJ&}8khR)6B(--!9Q zubo}h+1T)>a@c)H^i``@<^j?|r4*{;tQf78(xn0g39IoZw0(CwY1f<%F>kEaJ zp9u|IeMY5mRdAlw*+gSN^5$Q)ShM<~E=(c8QM+T-Qk)FyKz#Sw0EJ*edYcuOtO#~Cx^(M7w5 z3)rl#L)rF|(Vun2LkFr!rg8Q@=r>9p>(t3Gf_auiJ2Xx9HmxYTa|=MH_SUlYL`mz9 zTTS$`%;D-|Jt}AP1&k7PcnfFNTH0A-*FmxstjBDiZX?}%u%Yq94$fUT&z6od+(Uk> zuqsld#G(b$G8tus=M!N#oPd|PVFX)?M?tCD0tS%2IGTfh}3YA3f&UM)W$_GNV8 zQo+a(ml2Km4o6O%gKTCSDNq+#zCTIQ1*`TIJh~k6Gp;htHBFnne))rlFdGqwC6dx2+La1&Mnko*352k0y z+tQcwndQlX`nc6nb$A9?<-o|r*%aWXV#=6PQic0Ok_D;q>wbv&j7cKc!w4~KF#-{6 z(S%6Za)WpGIWf7jZ3svNG5OLs0>vCL9{V7cgO%zevIVMH{WgP*^D9ws&OqA{yr|m| zKD4*07dGXshJHd#e%x%J+qmS^lS|0Bp?{drv;{@{l9ArPO&?Q5=?OO9=}h$oVe#3b z3Yofj&Cb}WC$PxmRRS)H%&$1-)z7jELS}!u!zQ?A^Y{Tv4QVt*vd@uj-^t2fYRzQj zfxGR>-q|o$3sGn^#VzZ!QQx?h9`njeJry}@x?|k0-GTTA4y3t2E`3DZ!A~D?GiJup z)8%PK2^9OVRlP(24P^4_<|D=H^7}WlWu#LgsdHzB%cPy|f8dD3|A^mh4WXxhLTVu_ z@abE{6Saz|Y{rXYPd4$tfPYo}ef(oQWZ=4Bct-=_9`#Qgp4ma$n$`tOwq#&E18$B; z@Bp)bn3&rEi0>fWWZ@7k5WazfoX`SCO4jQWwVuo+$PmSZn^Hz?O(-tW@*DGxuf)V1 zO_xm&;NVCaHD4dqt(-MlszI3F-p?0!-e$fbiCeuaw66h^TTDLWuaV<@C-`=Xe5WL) zwooG7h>4&*)p3pKMS3O!4>-4jQUN}iAMQ)2*70?hP~)TzzR?-f@?Aqy$$1Iy8VGG$ zMM?8;j!pUX7QQD$gRc_#+=raAS577ga-w?jd`vCiN5lu)dEUkkUPl9!?{$IJNxQys z*E4e$eF&n&+AMRQR2gcaFEjAy*r)G!s(P6D&TfoApMFC_*Ftx0|D0@E-=B7tezU@d zZ{hGiN;YLIoSeRS;9o%dEua4b%4R3;$SugDjP$x;Z!M!@QibuSBb)HY!3zJ7M;^jw zlx6AD50FD&p3JyP*>o+t9YWW8(7P2t!VQQ21pHJOcG_SXQD;(5aX#M6x##5H_Re>6lPyDCjxr*R(+HE%c&QN+b^tbT zXBJk?p)zhJj#I?&Y2n&~XiytG9!1ox;bw5Rbj~)7c(MFBb4>IiRATdhg zmiEFlj@S_hwYYI(ki{}&<;_7(Z0Qkfq>am z&LtL=2qc7rWguk3BtE4zL41@#S;NN*-jWw|7Kx7H7~_%7fPt;TIX}Ubo>;Rmj94V> zNB1=;-9AR7s`Pxn}t_6^3ahlq53e&!Lh85uG 
zec0vJY_6e`tg7LgfrJ3k!DjR)Bi#L@DHIrZ`sK=<5O0Ip!fxGf*OgGSpP@Hbbe&$9 z;ZI}8lEoC2_7;%L2=w?tb%1oL0V+=Z`7b=P&lNGY;yVBazXRYu;+cQDKvm*7NCxu&i;zub zAJh#11%?w>E2rf2e~C4+rAb-&$^vsdACs7 z@|Ra!OfVM(ke{vyiqh7puf&Yp6cd6{DptUteYfIRWG3pI+5< zBVBI_xkBAc<(pcb$!Y%dTW(b;B;2pOI-(QCsLv@U-D1XJ z(Gk8Q3l7Ws46Aktuj>|s{$6zA&xCPuXL-kB`CgYMs}4IeyG*P51IDwW?8UNQd+$i~ zlxOPtSi5L|gJcF@DwmJA5Ju8HEJ>o{{upwIpb!f{2(vLNBw`7xMbvcw<^{Fj@E~1( z?w`iIMieunS#>nXlmUcSMU+D3rX28f?s7z;X=se6bo8;5vM|O^(D6{A9*ChnGH!RG zP##3>LDC3jZPE4PH32AxrqPk|yIIrq~`aL-=}`okhNu9aT%q z1b)7iJ)CN=V#Ly84N_r7U^SH2FGdE5FpTO2 z630TF$P>GNMu8`rOytb(lB2};`;P4YNwW1<5d3Q~AX#P0aX}R2b2)`rgkp#zTxcGj zAV^cvFbhP|JgWrq_e`~exr~sIR$6p5V?o4Wym3kQ3HA+;Pr$bQ0(PmADVO%MKL!^q z?zAM8j1l4jrq|5X+V!8S*2Wl@=7*pPgciTVK6kS1Ge zMsd_u6DFK$jTnvVtE;qa+8(1sGBu~n&F%dh(&c(Zs4Fc#A=gG^^%^AyH}1^?|8quj zl@Z47h$){PlELJgYZCIHHL= z{U8O>Tw4x3<1{?$8>k-P<}1y9DmAZP_;(3Y*{Sk^H^A=_iSJ@+s5ktgwTXz_2$~W9>VVZsfwCm@s0sQ zeB50_yu@uS+e7QoPvdCwDz{prjo(AFwR%C?z`EL{1`|coJHQTk^nX=tvs1<0arUOJ z!^`*x&&BvTYmemyZ)2p~{%eYX=JVR?DYr(rNgqRMA5E1PR1Iw=prk=L2ldy3r3Vg@27IZx43+ywyzr-X*p*d@tZV+!U#~$-q=8c zgdSuh#r?b4GhEGNai)ayHQpk>5(%j5c@C1K3(W1pb~HeHpaqijJZa-e6vq_8t-^M^ zBJxq|MqZc?pjXPIH}70a5vt!IUh;l}<>VX<-Qcv^u@5(@@M2CHSe_hD$VG-eiV^V( zj7*9T0?di?P$FaD6oo?)<)QT>Npf6Og!GO^GmPV(Km0!=+dE&bk#SNI+C9RGQ|{~O*VC+tXK3!n`5 zHfl6>lwf_aEVV3`0T!aHNZLsj$paS$=LL(?b!Czaa5bbSuZ6#$_@LK<(7yrrl+80| z{tOFd=|ta2Z`^ssozD9BINn45NxUeCQis?-BKmU*Kt=FY-NJ+)8S1ecuFtN-M?&42 zl2$G>u!iNhAk*HoJ^4v^9#ORYp5t^wDj6|lx~5w45#E5wVqI1JQ~9l?nPp1YINf++ zMAdSif~_ETv@Er(EFBI^@L4BULFW>)NI+ejHFP*T}UhWNN`I)RRS8za? 
z*@`1>9ZB}An%aT5K=_2iQmfE;GcBVHLF!$`I99o5GO`O%O_zLr9AG18>&^HkG(;=V z%}c!OBQ~?MX(9h~tajX{=x)+!cbM7$YzTlmsPOdp2L-?GoW`@{lY9U3f;OUo*BwRB z8A+nv(br0-SH#VxGy#ZrgnGD(=@;HME;yd46EgWJ`EL%oXc&lFpc@Y}^>G(W>h_v_ zlN!`idhX+OjL+~T?19sroAFVGfa5tX-D49w$1g2g_-T|EpHL6}K_aX4$K=LTvwtlF zL*z}j{f+Uoe7{-px3_5iKPA<_7W=>Izkk)!l9ez2w%vi(?Y;i8AxRNLSOGDzNoqoI zP!1uAl}r=_871(G?y`i&)-7{u=%nxk7CZ_Qh#!|ITec zwQn`33GTUM`;D2POWnkqngqJhJRlM>CTONzTG}>^Q0wUunQyn|TAiHzyX2_%ATx%P z%7gW)%4rA9^)M<_%k@`Y?RbC<29sWU&5;@|9thf2#zf8z12$hRcZ!CSb>kUp=4N#y zl3hE#y6>kkA8VY2`W`g5Ip?2qC_BY$>R`iGQLhz2-S>x(RuWv)SPaGdl^)gGw7tjR zH@;jwk!jIaCgSg_*9iF|a);sRUTq30(8I(obh^|}S~}P4U^BIGYqcz;MPpC~Y@k_m zaw4WG1_vz2GdCAX!$_a%GHK**@IrHSkGoN>)e}>yzUTm52on`hYot7cB=oA-h1u|R ztH$11t?54Qg2L+i33FPFKKRm1aOjKST{l1*(nps`>sv%VqeVMWjl5+Gh+9);hIP8? zA@$?}Sc z3qIRpba+y5yf{R6G(u8Z^vkg0Fu&D-7?1s=QZU`Ub{-!Y`I?AGf1VNuc^L3v>)>i# z{DV9W$)>34wnzAXUiV^ZpYKw>UElrN_5Xj6{r_3| z$X5PK`e5$7>~9Dj7gK5ash(dvs`vwfk}&RD`>04;j62zoXESkFBklYaKm5seyiX(P zqQ-;XxlV*yg?Dhlx%xt!b0N3GHp@(p$A;8|%# zZ5m2KL|{on4nr>2_s9Yh=r5ScQ0;aMF)G$-9-Ca6%wA`Pa)i?NGFA|#Yi?{X-4ZO_ z^}%7%vkzvUHa$-^Y#aA+aiR5sa%S|Ebyn`EV<3Pc?ax_f>@sBZF1S;7y$CXd5t5=WGsTKBk8$OfH4v|0?0I=Yp}7c=WBSCg!{0n)XmiU;lfx)**zZaYqmDJelxk$)nZyx5`x$6R|fz(;u zEje5Dtm|a%zK!!tk3{i9$I2b{vXNFy%Bf{50X!x{98+BsDr_u9i>G5%*sqEX|06J0 z^IY{UcEbj6LDwuMh7cH`H@9sVt1l1#8kEQ(LyT@&+K}(ReE`ux8gb0r6L_#bDUo^P z3Ka2lRo52Hdtl_%+pwVs14=q`{d^L58PsU@AMf(hENumaxM{7iAT5sYmWh@hQCO^ zK&}ijo=`VqZ#a3vE?`7QW0ZREL17ZvDfdqKGD?0D4fg{7v%|Yj&_jcKJAB)>=*RS* zto8p6@k%;&^ZF>hvXm&$PCuEp{uqw3VPG$9VMdW5$w-fy2CNNT>E;>ejBgy-m_6`& z97L1p{%srn@O_JQgFpa_#f(_)eb#YS>o>q3(*uB;uZb605(iqM$=NK{nHY=+X2*G) zO3-_Xh%aG}fHWe*==58zBwp%&`mge<8uq8;xIxOd=P%9EK!34^E9sk|(Zq1QSz-JVeP12Fp)-`F|KY$LPwUE?rku zY@OJ)Z9A!ojfzfeyJ9;zv2EM7ZQB)AR5xGa-tMn^bl)FmoIiVyJ@!~@%{}qXXD&Ns zPnfe5U+&ohKefILu_1mPfLGuapX@btta5C#gPB2cjk5m4T}Nfi+Vfka!Yd(L?-c~5 z#ZK4VeQEXNPc4r$K00Fg>g#_W!YZ)cJ?JTS<&68_$#cZT-ME`}tcwqg3#``3M3UPvn+pi}(VNNx6y zFIMVb6OwYU(2`at$gHba*qrMVUl8xk5z-z~fb@Q3Y_+aXuEKH}L+>eW__!IAd@V}L 
zkw#s%H0v2k5-=vh$^vPCuAi22Luu3uKTf6fPo?*nvj$9(u)4$6tvF-%IM+3pt*cgs z_?wW}J7VAA{_~!?))?s6{M=KPpVhg4fNuU*|3THp@_(q!b*hdl{fjRVFWtu^1dV(f z6iOux9hi&+UK=|%M*~|aqFK{Urfl!TA}UWY#`w(0P!KMe1Si{8|o))Gy6d7;!JQYhgMYmXl?3FfOM2nQGN@~Ap6(G z3+d_5y@=nkpKAhRqf{qQ~k7Z$v&l&@m7Ppt#FSNzKPZM z8LhihcE6i=<(#87E|Wr~HKvVWhkll4iSK$^mUHaxgy8*K$_Zj;zJ`L$naPj+^3zTi z-3NTaaKnD5FPY-~?Tq6QHnmDDRxu0mh0D|zD~Y=vv_qig5r-cIbCpxlju&8Sya)@{ zsmv6XUSi)@(?PvItkiZEeN*)AE~I_?#+Ja-r8$(XiXei2d@Hi7Rx8+rZZb?ZLa{;@*EHeRQ-YDadz~M*YCM4&F-r;E#M+@CSJMJ0oU|PQ^ z=E!HBJDMQ2TN*Y(Ag(ynAL8%^v;=~q?s4plA_hig&5Z0x_^Oab!T)@6kRN$)qEJ6E zNuQjg|G7iwU(N8pI@_6==0CL;lRh1dQF#wePhmu@hADFd3B5KIH#dx(2A zp~K&;Xw}F_N6CU~0)QpQk7s$a+LcTOj1%=WXI(U=Dv!6 z{#<#-)2+gCyyv=Jw?Ab#PVkxPDeH|sAxyG`|Ys}A$PW4TdBv%zDz z^?lwrxWR<%Vzc8Sgt|?FL6ej_*e&rhqJZ3Y>k=X(^dytycR;XDU16}Pc9Vn0>_@H+ zQ;a`GSMEG64=JRAOg%~L)x*w{2re6DVprNp+FcNra4VdNjiaF0M^*>CdPkt(m150rCue?FVdL0nFL$V%5y6N z%eLr5%YN7D06k5ji5*p4v$UMM)G??Q%RB27IvH7vYr_^3>1D-M66#MN8tWGw>WED} z5AhlsanO=STFYFs)Il_0i)l)f<8qn|$DW7ZXhf5xI;m+7M5-%P63XFQrG9>DMqHc} zsgNU9nR`b}E^mL5=@7<1_R~j@q_2U^3h|+`7YH-?C=vme1C3m`Fe0HC>pjt6f_XMh zy~-i-8R46QNYneL4t@)<0VU7({aUO?aH`z4V2+kxgH5pYD5)wCh75JqQY)jIPN=U6 z+qi8cGiOtXG2tXm;_CfpH9ESCz#i5B(42}rBJJF$jh<1sbpj^8&L;gzGHb8M{of+} zzF^8VgML2O9nxBW7AvdEt90vp+#kZxWf@A)o9f9}vKJy9NDBjBW zSt=Hcs=YWCwnfY1UYx*+msp{g!w0HC<_SM!VL1(I2PE?CS}r(eh?{I)mQixmo5^p# zV?2R!R@3GV6hwTCrfHiK#3Orj>I!GS2kYhk1S;aFBD_}u2v;0HYFq}Iz1Z(I4oca4 zxquja8$+8JW_EagDHf$a1OTk5S97umGSDaj)gH=fLs9>_=XvVj^Xj9a#gLdk=&3tl zfmK9MNnIX9v{?%xdw7568 zNrZ|roYs(vC4pHB5RJ8>)^*OuyNC>x7ad)tB_}3SgQ96+-JT^Qi<`xi=)_=$Skwv~ zdqeT9Pa`LYvCAn&rMa2aCDV(TMI#PA5g#RtV|CWpgDYRA^|55LLN^uNh*gOU>Z=a06qJ;$C9z8;n-Pq=qZnc1zUwJ@t)L;&NN+E5m zRkQ(SeM8=l-aoAKGKD>!@?mWTW&~)uF2PYUJ;tB^my`r9n|Ly~0c%diYzqs9W#FTjy?h&X3TnH zXqA{QI82sdjPO->f=^K^f>N`+B`q9&rN0bOXO79S&a9XX8zund(kW7O76f4dcWhIu zER`XSMSFbSL>b;Rp#`CuGJ&p$s~G|76){d?xSA5wVg##_O0DrmyEYppyBr%fyWbbv zp`K84JwRNP$d-pJ!Qk|(RMr?*!wi1if-9G#0p>>1QXKXWFy)eB3ai)l3601q8!9JC 
zvU#ZWWDNKq9g6fYs?JQ)Q4C_cgTy3FhgKb8s&m)DdmL5zhNK#8wWg!J*7G7Qhe9VU zha?^AQTDpYcuN!B+#1dE*X{<#!M%zfUQbj=zLE{dW0XeQ7-oIsGY6RbkP2re@Q{}r_$iiH0xU%iN*ST`A)-EH6eaZB$GA#v)cLi z*MpA(3bYk$oBDKAzu^kJoSUsDd|856DApz={3u8sbQV@JnRkp2nC|)m;#T=DvIL-O zI4vh;g7824l}*`_p@MT4+d`JZ2%6NQh=N9bmgJ#q!hK@_<`HQq3}Z8Ij>3%~<*= zcv=!oT#5xmeGI92lqm9sGVE%#X$ls;St|F#u!?5Y7syhx6q#MVRa&lBmmn%$C0QzU z);*ldgwwCmzM3uglr}!Z2G+?& zf%Dpo&mD%2ZcNFiN-Z0f;c_Q;A%f@>26f?{d1kxIJD}LxsQkB47SAdwinfMILZdN3 zfj^HmTzS3Ku5BxY>ANutS8WPQ-G>v4^_Qndy==P3pDm+Xc?>rUHl-4+^%Sp5atOja z2oP}ftw-rqnb}+khR3CrRg^ibi6?QYk1*i^;kQGirQ=uB9Sd1NTfT-Rbv;hqnY4neE5H1YUrjS2m+2&@uXiAo- zrKUX|Ohg7(6F(AoP~tj;NZlV#xsfo-5reuQHB$&EIAhyZk;bL;k9ouDmJNBAun;H& zn;Of1z_Qj`x&M;5X;{s~iGzBQTY^kv-k{ksbE*Dl%Qf%N@hQCfY~iUw!=F-*$cpf2 z3wix|aLBV0b;W@z^%7S{>9Z^T^fLOI68_;l@+Qzaxo`nAI8emTV@rRhEKZ z?*z_{oGdI~R*#<2{bkz$G~^Qef}$*4OYTgtL$e9q!FY7EqxJ2`zk6SQc}M(k(_MaV zSLJnTXw&@djco1~a(vhBl^&w=$fa9{Sru>7g8SHahv$&Bl(D@(Zwxo_3r=;VH|uc5 zi1Ny)J!<(KN-EcQ(xlw%PNwK8U>4$9nVOhj(y0l9X^vP1TA>r_7WtSExIOsz`nDOP zs}d>Vxb2Vo2e5x8p(n~Y5ggAyvib>d)6?)|E@{FIz?G3PVGLf7-;BxaP;c?7ddH$z zA+{~k^V=bZuXafOv!RPsE1GrR3J2TH9uB=Z67gok+u`V#}BR86hB1xl}H4v`F+mRfr zYhortD%@IGfh!JB(NUNSDh+qDz?4ztEgCz&bIG-Wg7w-ua4ChgQR_c+z8dT3<1?uX z*G(DKy_LTl*Ea!%v!RhpCXW1WJO6F`bgS-SB;Xw9#! 
z<*K}=#wVu9$`Yo|e!z-CPYH!nj7s9dEPr-E`DXUBu0n!xX~&|%#G=BeM?X@shQQMf zMvr2!y7p_gD5-!Lnm|a@z8Of^EKboZsTMk%5VsJEm>VsJ4W7Kv{<|#4f-qDE$D-W>gWT%z-!qXnDHhOvLk=?^a1*|0j z{pW{M0{#1VcR5;F!!fIlLVNh_Gj zbnW(_j?0c2q$EHIi@fSMR{OUKBcLr{Y&$hrM8XhPByyZaXy|dd&{hYQRJ9@Fn%h3p7*VQolBIV@Eq`=y%5BU~3RPa^$a?ixp^cCg z+}Q*X+CW9~TL29@OOng(#OAOd!)e$d%sr}^KBJ-?-X&|4HTmtemxmp?cT3uA?md4% zT8yZ0U;6Rg6JHy3fJae{6TMGS?ZUX6+gGTT{Q{)SI85$5FD{g-eR%O0KMpWPY`4@O zx!hen1*8^E(*}{m^V_?}(b5k3hYo=T+$&M32+B`}81~KKZhY;2H{7O-M@vbCzuX0n zW-&HXeyr1%I3$@ns-V1~Lb@wIpkmx|8I~ob1Of7i6BTNysEwI}=!nU%q7(V_^+d*G z7G;07m(CRTJup!`cdYi93r^+LY+`M*>aMuHJm(A8_O8C#A*$!Xvddgpjx5)?_EB*q zgE8o5O>e~9IiSC@WtZpF{4Bj2J5eZ>uUzY%TgWF7wdDE!fSQIAWCP)V{;HsU3ap?4 znRsiiDbtN7i9hapO;(|Ew>Ip2TZSvK9Z^N21%J?OiA_&eP1{(Pu_=%JjKy|HOardq ze?zK^K zA%sjF64*Wufad%H<) z^|t>e*h+Z1#l=5wHexzt9HNDNXgM=-OPWKd^5p!~%SIl>Fo&7BvNpbf8{NXmH)o{r zO=aBJ;meX1^{O%q;kqdw*5k!Y7%t_30 zy{nGRVc&5qt?dBwLs+^Sfp;f`YVMSB#C>z^a9@fpZ!xb|b-JEz1LBX7ci)V@W+kvQ89KWA0T~Lj$aCcfW#nD5bt&Y_< z-q{4ZXDqVg?|0o)j1%l0^_it0WF*LCn-+)c!2y5yS7aZIN$>0LqNnkujV*YVes(v$ zY@_-!Q;!ZyJ}Bg|G-~w@or&u0RO?vlt5*9~yeoPV_UWrO2J54b4#{D(D>jF(R88u2 zo#B^@iF_%S>{iXSol8jpmsZuJ?+;epg>k=$d`?GSegAVp3n$`GVDvK${N*#L_1`44 z{w0fL{2%)0|E+qgZtjX}itZz^KJt4Y;*8uSK}Ft38+3>j|K(PxIXXR-t4VopXo#9# zt|F{LWr-?34y`$nLBVV_*UEgA6AUI65dYIbqpNq9cl&uLJ0~L}<=ESlOm?Y-S@L*d z<7vt}`)TW#f%Rp$Q}6@3=j$7Tze@_uZO@aMn<|si{?S}~maII`VTjs&?}jQ4_cut9$)PEqMukwoXobzaKx^MV z2fQwl+;LSZ$qy%Tys0oo^K=jOw$!YwCv^ei4NBVauL)tN%=wz9M{uf{IB(BxK|lT*pFkmNK_1tV`nb%jH=a0~VNq2RCKY(rG7jz!-D^k)Ec)yS%17pE#o6&eY+ z^qN(hQT$}5F(=4lgNQhlxj?nB4N6ntUY6(?+R#B?W3hY_a*)hnr4PA|vJ<6p`K3Z5Hy z{{8(|ux~NLUW=!?9Qe&WXMTAkQnLXg(g=I@(VG3{HE13OaUT|DljyWXPs2FE@?`iU z4GQlM&Q=T<4&v@Fe<+TuXiZQT3G~vZ&^POfmI1K2h6t4eD}Gk5XFGpbj1n_g*{qmD6Xy z`6Vv|lLZtLmrnv*{Q%xxtcWVj3K4M%$bdBk_a&ar{{GWyu#ljM;dII;*jP;QH z#+^o-A4np{@|Mz+LphTD0`FTyxYq#wY)*&Ls5o{0z9yg2K+K7ZN>j1>N&;r+Z`vI| zDzG1LJZ+sE?m?>x{5LJx^)g&pGEpY=fQ-4}{x=ru;}FL$inHemOg%|R*ZXPodU}Kh zFEd5#+8rGq$Y<_?k-}r5zgQ3jRV=ooHiF|@z_#D4pKVEmn5CGV(9VKCyG|sT9nc=U 
zEoT67R`C->KY8Wp-fEcjjFm^;Cg(ls|*ABVHq8clBE(;~K^b+S>6uj70g? z&{XQ5U&!Z$SO7zfP+y^8XBbiu*Cv-yJG|l-oe*!s5$@Lh_KpxYL2sx`B|V=dETN>5K+C+CU~a_3cI8{vbu$TNVdGf15*>D zz@f{zIlorkY>TRh7mKuAlN9A0>N>SV`X)+bEHms=mfYTMWt_AJtz_h+JMmrgH?mZt zm=lfdF`t^J*XLg7v+iS)XZROygK=CS@CvUaJo&w2W!Wb@aa?~Drtf`JV^cCMjngVZ zv&xaIBEo8EYWuML+vxCpjjY^s1-ahXJzAV6hTw%ZIy!FjI}aJ+{rE&u#>rs)vzuxz z+$5z=7W?zH2>Eb32dvgHYZtCAf!=OLY-pb4>Ae79rd68E2LkVPj-|jFeyqtBCCwiW zkB@kO_(3wFq)7qwV}bA=zD!*@UhT`geq}ITo%@O(Z5Y80nEX~;0-8kO{oB6|(4fQh z);73T!>3@{ZobPwRv*W?7m0Ml9GmJBCJd&6E?hdj9lV= z4flNfsc(J*DyPv?RCOx!MSvk(M952PJ-G|JeVxWVjN~SNS6n-_Ge3Q;TGE;EQvZg86%wZ`MB zSMQua(i*R8a75!6$QRO^(o7sGoomb+Y{OMy;m~Oa`;P9Yqo>?bJAhqXxLr7_3g_n>f#UVtxG!^F#1+y@os6x(sg z^28bsQ@8rw%Gxk-stAEPRbv^}5sLe=VMbkc@Jjimqjvmd!3E7+QnL>|(^3!R} zD-l1l7*Amu@j+PWLGHXXaFG0Ct2Q=}5YNUxEQHCAU7gA$sSC<5OGylNnQUa>>l%sM zyu}z6i&({U@x^hln**o6r2s-(C-L50tQvz|zHTqW!ir?w&V23tuYEDJVV#5pE|OJu z7^R!A$iM$YCe?8n67l*J-okwfZ+ZTkGvZ)tVPfR;|3gyFjF)8V zyXXN=!*bpyRg9#~Bg1+UDYCt0 ztp4&?t1X0q>uz;ann$OrZs{5*r`(oNvw=$7O#rD|Wuv*wIi)4b zGtq4%BX+kkagv3F9Id6~-c+1&?zny%w5j&nk9SQfo0k4LhdSU_kWGW7axkfpgR`8* z!?UTG*Zi_baA1^0eda8S|@&F z{)Rad0kiLjB|=}XFJhD(S3ssKlveFFmkN{Vl^_nb!o5M!RC=m)V&v2%e?ZoRC@h3> zJ(?pvToFd`*Zc@HFPL#=otWKwtuuQ_dT-Hr{S%pQX<6dqVJ8;f(o)4~VM_kEQkMR+ zs1SCVi~k>M`u1u2xc}>#D!V&6nOOh-E$O&SzYrjJdZpaDv1!R-QGA141WjQe2s0J~ zQ;AXG)F+K#K8_5HVqRoRM%^EduqOnS(j2)|ctA6Q^=|s_WJYU;Z%5bHp08HPL`YF2 zR)Ad1z{zh`=sDs^&V}J z%$Z$!jd7BY5AkT?j`eqMs%!Gm@T8)4w3GYEX~IwgE~`d|@T{WYHkudy(47brgHXx& zBL1yFG6!!!VOSmDxBpefy2{L_u5yTwja&HA!mYA#wg#bc-m%~8aRR|~AvMnind@zs zy>wkShe5&*un^zvSOdlVu%kHsEo>@puMQ`b1}(|)l~E{5)f7gC=E$fP(FC2=F<^|A zxeIm?{EE!3sO!Gr7e{w)Dx(uU#3WrFZ>ibmKSQ1tY?*-Nh1TDHLe+k*;{Rp!Bmd_m zb#^kh`Y*8l|9Cz2e{;RL%_lg{#^Ar+NH|3z*Zye>!alpt{z;4dFAw^^H!6ING*EFc z_yqhr8d!;%nHX9AKhFQZBGrSzfzYCi%C!(Q5*~hX>)0N`vbhZ@N|i;_972WSx*>LH z87?en(;2_`{_JHF`Sv6Wlps;dCcj+8IJ8ca6`DsOQCMb3n# z3)_w%FuJ3>fjeOOtWyq)ag|PmgQbC-s}KRHG~enBcIwqIiGW8R8jFeBNY9|YswRY5 zjGUxdGgUD26wOpwM#8a!Nuqg68*dG@VM~SbOroL_On0N6QdT9?)NeB3@0FCC?Z|E0 
z6TPZj(AsPtwCw>*{eDEE}Gby>0q{*lI+g2e&(YQrsY&uGM{O~}(oM@YWmb*F zA0^rr5~UD^qmNljq$F#ARXRZ1igP`MQx4aS6*MS;Ot(1L5jF2NJ;de!NujUYg$dr# z=TEL_zTj2@>ZZN(NYCeVX2==~=aT)R30gETO{G&GM4XN<+!&W&(WcDP%oL8PyIVUC zs5AvMgh6qr-2?^unB@mXK*Dbil^y-GTC+>&N5HkzXtozVf93m~xOUHn8`HpX=$_v2 z61H;Z1qK9o;>->tb8y%#4H)765W4E>TQ1o0PFj)uTOPEvv&}%(_mG0ISmyhnQV33Z$#&yd{ zc{>8V8XK$3u8}04CmAQ#I@XvtmB*s4t8va?-IY4@CN>;)mLb_4!&P3XSw4pA_NzDb zORn!blT-aHk1%Jpi>T~oGLuh{DB)JIGZ9KOsciWs2N7mM1JWM+lna4vkDL?Q)z_Ct z`!mi0jtr+4*L&N7jk&LodVO#6?_qRGVaucqVB8*us6i3BTa^^EI0x%EREQSXV@f!lak6Wf1cNZ8>*artIJ(ADO*=<-an`3zB4d*oO*8D1K!f z*A@P1bZCNtU=p!742MrAj%&5v%Xp_dSX@4YCw%F|%Dk=u|1BOmo)HsVz)nD5USa zR~??e61sO(;PR)iaxK{M%QM_rIua9C^4ppVS$qCT9j2%?*em?`4Z;4@>I(c%M&#cH z>4}*;ej<4cKkbCAjjDsyKS8rIm90O)Jjgyxj5^venBx&7B!xLmzxW3jhj7sR(^3Fz z84EY|p1NauwXUr;FfZjdaAfh%ivyp+^!jBjJuAaKa!yCq=?T_)R!>16?{~p)FQ3LDoMyG%hL#pR!f@P%*;#90rs_y z@9}@r1BmM-SJ#DeuqCQk=J?ixDSwL*wh|G#us;dd{H}3*-Y7Tv5m=bQJMcH+_S`zVtf;!0kt*(zwJ zs+kedTm!A}cMiM!qv(c$o5K%}Yd0|nOd0iLjus&;s0Acvoi-PFrWm?+q9f^FslxGi z6ywB`QpL$rJzWDg(4)C4+!2cLE}UPCTBLa*_=c#*$b2PWrRN46$y~yST3a2$7hEH= zNjux+wna^AzQ=KEa_5#9Ph=G1{S0#hh1L3hQ`@HrVnCx{!fw_a0N5xV(iPdKZ-HOM za)LdgK}1ww*C_>V7hbQnTzjURJL`S%`6nTHcgS+dB6b_;PY1FsrdE8(2K6FN>37!62j_cBlui{jO^$dPkGHV>pXvW0EiOA zqW`YaSUBWg_v^Y5tPJfWLcLpsA8T zG)!x>pKMpt!lv3&KV!-um= zKCir6`bEL_LCFx4Z5bAFXW$g3Cq`?Q%)3q0r852XI*Der*JNuKUZ`C{cCuu8R8nkt z%pnF>R$uY8L+D!V{s^9>IC+bmt<05h**>49R*#vpM*4i0qRB2uPbg8{{s#9yC;Z18 zD7|4m<9qneQ84uX|J&f-g8a|nFKFt34@Bt{CU`v(SYbbn95Q67*)_Esl_;v291s=9 z+#2F2apZU4Tq=x+?V}CjwD(P=U~d<=mfEFuyPB`Ey82V9G#Sk8H_Ob_RnP3s?)S_3 zr%}Pb?;lt_)Nf>@zX~D~TBr;-LS<1I##8z`;0ZCvI_QbXNh8Iv)$LS=*gHr;}dgb=w5$3k2la1keIm|=7<-JD>)U%=Avl0Vj@+&vxn zt-)`vJxJr88D&!}2^{GPXc^nmRf#}nb$4MMkBA21GzB`-Or`-3lq^O^svO7Vs~FdM zv`NvzyG+0T!P8l_&8gH|pzE{N(gv_tgDU7SWeiI-iHC#0Ai%Ixn4&nt{5y3(GQs)i z&uA;~_0shP$0Wh0VooIeyC|lak__#KVJfxa7*mYmZ22@(<^W}FdKjd*U1CqSjNKW% z*z$5$=t^+;Ui=MoDW~A7;)Mj%ibX1_p4gu>RC}Z_pl`U*{_z@+HN?AF{_W z?M_X@o%w8fgFIJ$fIzBeK=v#*`mtY$HC3tqw7q^GCT!P$I%=2N4FY7j9nG8aIm$c9 
zeKTxVKN!UJ{#W)zxW|Q^K!3s;(*7Gbn;e@pQBCDS(I|Y0euK#dSQ_W^)sv5pa%<^o zyu}3d?Lx`)3-n5Sy9r#`I{+t6x%I%G(iewGbvor&I^{lhu-!#}*Q3^itvY(^UWXgvthH52zLy&T+B)Pw;5>4D6>74 zO_EBS)>l!zLTVkX@NDqyN2cXTwsUVao7$HcqV2%t$YzdAC&T)dwzExa3*kt9d(}al zA~M}=%2NVNUjZiO7c>04YH)sRelXJYpWSn^aC$|Ji|E13a^-v2MB!Nc*b+=KY7MCm zqIteKfNkONq}uM;PB?vvgQvfKLPMB8u5+Am=d#>g+o&Ysb>dX9EC8q?D$pJH!MTAqa=DS5$cb+;hEvjwVfF{4;M{5U&^_+r zvZdu_rildI!*|*A$TzJ&apQWV@p{!W`=?t(o0{?9y&vM)V)ycGSlI3`;ps(vf2PUq zX745#`cmT*ra7XECC0gKkpu2eyhFEUb?;4@X7weEnLjXj_F~?OzL1U1L0|s6M+kIhmi%`n5vvDALMagi4`wMc=JV{XiO+^ z?s9i7;GgrRW{Mx)d7rj)?(;|b-`iBNPqdwtt%32se@?w4<^KU&585_kZ=`Wy^oLu9 z?DQAh5z%q;UkP48jgMFHTf#mj?#z|=w= z(q6~17Vn}P)J3M?O)x))%a5+>TFW3No~TgP;f}K$#icBh;rSS+R|}l鯊%1Et zwk~hMkhq;MOw^Q5`7oC{CUUyTw9x>^%*FHx^qJw(LB+E0WBX@{Ghw;)6aA-KyYg8p z7XDveQOpEr;B4je@2~usI5BlFadedX^ma{b{ypd|RNYqo#~d*mj&y`^iojR}s%~vF z(H!u`yx68D1Tj(3(m;Q+Ma}s2n#;O~bcB1`lYk%Irx60&-nWIUBr2x&@}@76+*zJ5 ze&4?q8?m%L9c6h=J$WBzbiTf1Z-0Eb5$IZs>lvm$>1n_Mezp*qw_pr8<8$6f)5f<@ zyV#tzMCs51nTv_5ca`x`yfE5YA^*%O_H?;tWYdM_kHPubA%vy47i=9>Bq) zRQ&0UwLQHeswmB1yP)+BiR;S+Vc-5TX84KUA;8VY9}yEj0eESSO`7HQ4lO z4(CyA8y1G7_C;6kd4U3K-aNOK!sHE}KL_-^EDl(vB42P$2Km7$WGqNy=%fqB+ zSLdrlcbEH=T@W8V4(TgoXZ*G1_aq$K^@ek=TVhoKRjw;HyI&coln|uRr5mMOy2GXP zwr*F^Y|!Sjr2YQXX(Fp^*`Wk905K%$bd03R4(igl0&7IIm*#f`A!DCarW9$h$z`kYk9MjjqN&5-DsH@8xh63!fTNPxWsFQhNv z#|3RjnP$Thdb#Ys7M+v|>AHm0BVTw)EH}>x@_f4zca&3tXJhTZ8pO}aN?(dHo)44Z z_5j+YP=jMlFqwvf3lq!57-SAuRV2_gJ*wsR_!Y4Z(trO}0wmB9%f#jNDHPdQGHFR; zZXzS-$`;7DQ5vF~oSgP3bNV$6Z(rwo6W(U07b1n3UHqml>{=6&-4PALATsH@Bh^W? 
z)ob%oAPaiw{?9HfMzpGb)@Kys^J$CN{uf*HX?)z=g`J(uK1YO^8~s1(ZIbG%Et(|q z$D@_QqltVZu9Py4R0Ld8!U|#`5~^M=b>fnHthzKBRr=i+w@0Vr^l|W;=zFT#PJ?*a zbC}G#It}rQP^Ait^W&aa6B;+0gNvz4cWUMzpv(1gvfw-X4xJ2Sv;mt;zb2Tsn|kSS zo*U9N?I{=-;a-OybL4r;PolCfiaL=y@o9{%`>+&FI#D^uy#>)R@b^1ue&AKKwuI*` zx%+6r48EIX6nF4o;>)zhV_8(IEX})NGU6Vs(yslrx{5fII}o3SMHW7wGtK9oIO4OM&@@ECtXSICLcPXoS|{;=_yj>hh*%hP27yZwOmj4&Lh z*Nd@OMkd!aKReoqNOkp5cW*lC)&C$P?+H3*%8)6HcpBg&IhGP^77XPZpc%WKYLX$T zsSQ$|ntaVVOoRat$6lvZO(G-QM5s#N4j*|N_;8cc2v_k4n6zx9c1L4JL*83F-C1Cn zaJhd;>rHXB%%ZN=3_o3&Qd2YOxrK~&?1=UuN9QhL$~OY-Qyg&})#ez*8NpQW_*a&kD&ANjedxT0Ar z<6r{eaVz3`d~+N~vkMaV8{F?RBVemN(jD@S8qO~L{rUw#=2a$V(7rLE+kGUZ<%pdr z?$DP|Vg#gZ9S}w((O2NbxzQ^zTot=89!0^~hE{|c9q1hVzv0?YC5s42Yx($;hAp*E zyoGuRyphQY{Q2ee0Xx`1&lv(l-SeC$NEyS~8iil3_aNlnqF_G|;zt#F%1;J)jnPT& z@iU0S;wHJ2$f!juqEzPZeZkjcQ+Pa@eERSLKsWf=`{R@yv7AuRh&ALRTAy z8=g&nxsSJCe!QLchJ=}6|LshnXIK)SNd zRkJNiqHwKK{SO;N5m5wdL&qK`v|d?5<4!(FAsDxR>Ky#0#t$8XCMptvNo?|SY?d8b z`*8dVBlXTUanlh6n)!EHf2&PDG8sXNAt6~u-_1EjPI1|<=33T8 zEnA00E!`4Ave0d&VVh0e>)Dc}=FfAFxpsC1u9ATfQ`-Cu;mhc8Z>2;uyXtqpLb7(P zd2F9<3cXS} znMg?{&8_YFTGRQZEPU-XPq55%51}RJpw@LO_|)CFAt62-_!u_Uq$csc+7|3+TV_!h z+2a7Yh^5AA{q^m|=KSJL+w-EWDBc&I_I1vOr^}P8i?cKMhGy$CP0XKrQzCheG$}G# zuglf8*PAFO8%xop7KSwI8||liTaQ9NCAFarr~psQt)g*pC@9bORZ>m`_GA`_K@~&% zijH0z;T$fd;-Liw8%EKZas>BH8nYTqsK7F;>>@YsE=Rqo?_8}UO-S#|6~CAW0Oz1} z3F(1=+#wrBJh4H)9jTQ_$~@#9|Bc1Pd3rAIA_&vOpvvbgDJOM(yNPhJJq2%PCcMaI zrbe~toYzvkZYQ{ea(Wiyu#4WB#RRN%bMe=SOk!CbJZv^m?Flo5p{W8|0i3`hI3Np# zvCZqY%o258CI=SGb+A3yJe~JH^i{uU`#U#fvSC~rWTq+K`E%J@ zasU07&pB6A4w3b?d?q}2=0rA#SA7D`X+zg@&zm^iA*HVi z009#PUH<%lk4z~p^l0S{lCJk1Uxi=F4e_DwlfHA`X`rv(|JqWKAA5nH+u4Da+E_p+ zVmH@lg^n4ixs~*@gm_dgQ&eDmE1mnw5wBz9Yg?QdZwF|an67Xd*x!He)Gc8&2!urh z4_uXzbYz-aX)X1>&iUjGp;P1u8&7TID0bTH-jCL&Xk8b&;;6p2op_=y^m@Nq*0{#o!!A;wNAFG@0%Z9rHo zcJs?Th>Ny6+hI`+1XoU*ED$Yf@9f91m9Y=#N(HJP^Y@ZEYR6I?oM{>&Wq4|v0IB(p zqX#Z<_3X(&{H+{3Tr|sFy}~=bv+l=P;|sBz$wk-n^R`G3p0(p>p=5ahpaD7>r|>pm 
zv;V`_IR@tvZreIuv2EM7ZQHhO+qUgw#kOs%*ekY^n|=1#x9&c;Ro&I~{rG-#_3ZB1 z?|9}IFdbP}^DneP*T-JaoYHt~r@EfvnPE5EKUwIxjPbsr$% zfWW83pgWST7*B(o=kmo)74$8UU)v0{@4DI+ci&%=#90}!CZz|rnH+Mz=HN~97G3~@ z;v5(9_2%eca(9iu@J@aqaMS6*$TMw!S>H(b z4(*B!|H|8&EuB%mITr~O?vVEf%(Gr)6E=>H~1VR z&1YOXluJSG1!?TnT)_*YmJ*o_Q@om~(GdrhI{$Fsx_zrkupc#y{DK1WOUR>tk>ZE) ziOLoBkhZZ?0Uf}cm>GsA>Rd6V8@JF)J*EQlQ<=JD@m<)hyElXR0`pTku*3MU`HJn| zIf7$)RlK^pW-$87U;431;Ye4Ie+l~_B3*bH1>*yKzn23cH0u(i5pXV! z4K?{3oF7ZavmmtTq((wtml)m6i)8X6ot_mrE-QJCW}Yn!(3~aUHYG=^fA<^~`e3yc z-NWTb{gR;DOUcK#zPbN^D*e=2eR^_!(!RKkiwMW@@yYtEoOp4XjOGgzi`;=8 zi3`Ccw1%L*y(FDj=C7Ro-V?q)-%p?Ob2ZElu`eZ99n14-ZkEV#y5C+{Pq87Gu3&>g zFy~Wk7^6v*)4pF3@F@rE__k3ikx(hzN3@e*^0=KNA6|jC^B5nf(XaoQaZN?Xi}Rn3 z$8&m*KmWvPaUQ(V<#J+S&zO|8P-#!f%7G+n_%sXp9=J%Z4&9OkWXeuZN}ssgQ#Tcj z8p6ErJQJWZ+fXLCco=RN8D{W%+*kko*2-LEb))xcHwNl~Xmir>kmAxW?eW50Osw3# zki8Fl$#fvw*7rqd?%E?}ZX4`c5-R&w!Y0#EBbelVXSng+kUfeUiqofPehl}$ormli zg%r)}?%=?_pHb9`Cq9Z|B`L8b>(!+8HSX?`5+5mm81AFXfnAt1*R3F z%b2RPIacKAddx%JfQ8l{3U|vK@W7KB$CdLqn@wP^?azRks@x8z59#$Q*7q!KilY-P zHUbs(IFYRGG1{~@RF;Lqyho$~7^hNC`NL3kn^Td%A7dRgr_&`2k=t+}D-o9&C!y^? 
z6MsQ=tc3g0xkK(O%DzR9nbNB(r@L;1zQrs8mzx&4dz}?3KNYozOW5;=w18U6$G4U2 z#2^qRLT*Mo4bV1Oeo1PKQ2WQS2Y-hv&S|C7`xh6=Pj7MNLC5K-zokZ67S)C;(F0Dd zloDK2_o1$Fmza>EMj3X9je7e%Q`$39Dk~GoOj89-6q9|_WJlSl!!+*{R=tGp z8u|MuSwm^t7K^nUe+^0G3dkGZr3@(X+TL5eah)K^Tn zXEtHmR9UIaEYgD5Nhh(s*fcG_lh-mfy5iUF3xxpRZ0q3nZ=1qAtUa?(LnT9I&~uxX z`pV?+=|-Gl(kz?w!zIieXT}o}7@`QO>;u$Z!QB${a08_bW0_o@&9cjJUXzVyNGCm8 zm=W+$H!;_Kzp6WQqxUI;JlPY&`V}9C$8HZ^m?NvI*JT@~BM=()T()Ii#+*$y@lTZBkmMMda>7s#O(1YZR+zTG@&}!EXFG{ zEWPSDI5bFi;NT>Yj*FjH((=oe%t%xYmE~AGaOc4#9K_XsVpl<4SP@E!TgC0qpe1oi zNpxU2b0(lEMcoibQ-G^cxO?ySVW26HoBNa;n0}CWL*{k)oBu1>F18X061$SP{Gu67 z-v-Fa=Fl^u3lnGY^o5v)Bux}bNZ~ z5pL+7F_Esoun8^5>z8NFoIdb$sNS&xT8_|`GTe8zSXQzs4r^g0kZjg(b0bJvz`g<70u9Z3fQILX1Lj@;@+##bP|FAOl)U^9U>0rx zGi)M1(Hce)LAvQO-pW!MN$;#ZMX?VE(22lTlJrk#pB0FJNqVwC+*%${Gt#r_tH9I_ z;+#)#8cWAl?d@R+O+}@1A^hAR1s3UcW{G+>;X4utD2d9X(jF555}!TVN-hByV6t+A zdFR^aE@GNNgSxxixS2p=on4(+*+f<8xrwAObC)D5)4!z7)}mTpb7&ofF3u&9&wPS< zB62WHLGMhmrmOAgmJ+|c>qEWTD#jd~lHNgT0?t-p{T=~#EMcB| z=AoDKOL+qXCfk~F)-Rv**V}}gWFl>liXOl7Uec_8v)(S#av99PX1sQIVZ9eNLkhq$ zt|qu0b?GW_uo}TbU8!jYn8iJeIP)r@;!Ze_7mj{AUV$GEz6bDSDO=D!&C9!M@*S2! 
zfGyA|EPlXGMjkH6x7OMF?gKL7{GvGfED=Jte^p=91FpCu)#{whAMw`vSLa`K#atdN zThnL+7!ZNmP{rc=Z>%$meH;Qi1=m1E3Lq2D_O1-X5C;!I0L>zur@tPAC9*7Jeh)`;eec}1`nkRP(%iv-`N zZ@ip-g|7l6Hz%j%gcAM}6-nrC8oA$BkOTz^?dakvX?`^=ZkYh%vUE z9+&)K1UTK=ahYiaNn&G5nHUY5niLGus@p5E2@RwZufRvF{@$hW{;{3QhjvEHMvduO z#Wf-@oYU4ht?#uP{N3utVzV49mEc9>*TV_W2TVC`6+oI)zAjy$KJrr=*q##&kobiQ z1vNbya&OVjK`2pdRrM?LuK6BgrLN7H_3m z!qpNKg~87XgCwb#I=Q&0rI*l$wM!qTkXrx1ko5q-f;=R2fImRMwt5Qs{P*p^z@9ex z`2#v(qE&F%MXlHpdO#QEZyZftn4f05ab^f2vjxuFaat2}jke{j?5GrF=WYBR?gS(^ z9SBiNi}anzBDBRc+QqizTTQuJrzm^bNA~A{j%ugXP7McZqJ}65l10({wk++$=e8O{ zxWjG!Qp#5OmI#XRQQM?n6?1ztl6^D40hDJr?4$Wc&O_{*OfMfxe)V0=e{|N?J#fgE>j9jAajze$iN!*yeF%jJU#G1c@@rm zolGW!j?W6Q8pP=lkctNFdfgUMg92wlM4E$aks1??M$~WQfzzzXtS)wKrr2sJeCN4X zY(X^H_c^PzfcO8Bq(Q*p4c_v@F$Y8cHLrH$`pJ2}=#*8%JYdqsqnGqEdBQMpl!Ot04tUGSXTQdsX&GDtjbWD=prcCT9(+ z&UM%lW%Q3yrl1yiYs;LxzIy>2G}EPY6|sBhL&X&RAQrSAV4Tlh2nITR?{6xO9ujGu zr*)^E`>o!c=gT*_@6S&>0POxcXYNQd&HMw6<|#{eSute2C3{&h?Ah|cw56-AP^f8l zT^kvZY$YiH8j)sk7_=;gx)vx-PW`hbSBXJGCTkpt;ap(}G2GY=2bbjABU5)ty%G#x zAi07{Bjhv}>OD#5zh#$0w;-vvC@^}F! 
z#X$@)zIs1L^E;2xDAwEjaXhTBw2<{&JkF*`;c3<1U@A4MaLPe{M5DGGkL}#{cHL%* zYMG+-Fm0#qzPL#V)TvQVI|?_M>=zVJr9>(6ib*#z8q@mYKXDP`k&A4A};xMK0h=yrMp~JW{L?mE~ph&1Y1a#4%SO)@{ zK2juwynUOC)U*hVlJU17%llUxAJFuKZh3K0gU`aP)pc~bE~mM!i1mi!~LTf>1Wp< zuG+ahp^gH8g8-M$u{HUWh0m^9Rg@cQ{&DAO{PTMudV6c?ka7+AO& z746QylZ&Oj`1aqfu?l&zGtJnpEQOt;OAFq19MXTcI~`ZcoZmyMrIKDFRIDi`FH)w; z8+*8tdevMDv*VtQi|e}CnB_JWs>fhLOH-+Os2Lh!&)Oh2utl{*AwR)QVLS49iTp{6 z;|172Jl!Ml17unF+pd+Ff@jIE-{Oxv)5|pOm@CkHW?{l}b@1>Pe!l}VccX#xp@xgJ zyE<&ep$=*vT=}7vtvif0B?9xw_3Gej7mN*dOHdQPtW5kA5_zGD zpA4tV2*0E^OUimSsV#?Tg#oiQ>%4D@1F5@AHwT8Kgen$bSMHD3sXCkq8^(uo7CWk`mT zuslYq`6Yz;L%wJh$3l1%SZv#QnG3=NZ=BK4yzk#HAPbqXa92;3K5?0kn4TQ`%E%X} z&>Lbt!!QclYKd6+J7Nl@xv!uD%)*bY-;p`y^ZCC<%LEHUi$l5biu!sT3TGGSTPA21 zT8@B&a0lJHVn1I$I3I1I{W9fJAYc+8 zVj8>HvD}&O`TqU2AAb={?eT;0hyL(R{|h23=4fDSZKC32;wWxsVj`P z3J3{M$PwdH!ro*Cn!D&=jnFR>BNGR<<|I8CI@+@658Dy(lhqbhXfPTVecY@L8%`3Q z1Fux2w?2C3th60jI~%OC9BtpNF$QPqcG+Pz96qZJ71_`0o0w_q7|h&O>`6U+^BA&5 zXd5Zp1Xkw~>M%RixTm&OqpNl8Q+ue=92Op_>T~_9UON?ZM2c0aGm=^A4ejrXj3dV9 zhh_bCt-b9`uOX#cFLj!vhZ#lS8Tc47OH>*)y#{O9?AT~KR9LntM|#l#Dlm^8{nZdk zjMl#>ZM%#^nK2TPzLcKxqx24P7R1FPlBy7LSBrRvx>fE$9AJ;7{PQm~^LBX^k#6Zq zw*Z(zJC|`!6_)EFR}8|n8&&Rbj8y028~P~sFXBFRt+tmqH-S3<%N;C&WGH!f3{7cm zy_fCAb9@HqaXa1Y5vFbxWf%#zg6SI$C+Uz5=CTO}e|2fjWkZ;Dx|84Ow~bkI=LW+U zuq;KSv9VMboRvs9)}2PAO|b(JCEC_A0wq{uEj|3x@}*=bOd zwr{TgeCGG>HT<@Zeq8y}vTpwDg#UBvD)BEs@1KP$^3$sh&_joQPn{hjBXmLPJ{tC) z*HS`*2+VtJO{|e$mM^|qv1R*8i(m1`%)}g=SU#T#0KlTM2RSvYUc1fP+va|4;5}Bfz98UvDCpq7}+SMV&;nX zQw~N6qOX{P55{#LQkrZk(e5YGzr|(B;Q;ju;2a`q+S9bsEH@i1{_Y0;hWYn1-79jl z5c&bytD*k)GqrVcHn6t-7kinadiD>B{Tl`ZY@`g|b~pvHh5!gKP4({rp?D0aFd_cN zhHRo4dd5^S6ViN(>(28qZT6E>??aRhc($kP`>@<+lIKS5HdhjVU;>f7<4))E*5|g{ z&d1}D|vpuV^eRj5j|xx9nwaCxXFG?Qbjn~_WSy=N}P0W>MP zG-F%70lX5Xr$a)2i6?i|iMyM|;Jtf*hO?=Jxj12oz&>P=1#h~lf%#fc73M2_(SUM- zf&qnjS80|_Y0lDgl&I?*eMumUklLe_=Td!9G@eR*tcPOgIShJipp3{A10u(4eT~DY zHezEj8V+7m!knn7)W!-5QI3=IvC^as5+TW1@Ern@yX| z7Nn~xVx&fGSr+L%4iohtS3w^{-H1A_5=r&x8}R!YZvp<2T^YFvj8G_vm}5q;^UOJf 
ztl=X3iL;;^^a#`t{Ae-%5Oq{?M#s6Npj+L(n-*LMI-yMR{)qki!~{5z{&`-iL}lgW zxo+tnvICK=lImjV$Z|O_cYj_PlEYCzu-XBz&XC-JVxUh9;6*z4fuBG+H{voCC;`~GYV|hj%j_&I zDZCj>Q_0RCwFauYoVMiUSB+*Mx`tg)bWmM^SwMA+?lBg12QUF_x2b)b?qb88K-YUd z0dO}3k#QirBV<5%jL$#wlf!60dizu;tsp(7XLdI=eQs?P`tOZYMjVq&jE)qK*6B^$ zBe>VvH5TO>s>izhwJJ$<`a8fakTL!yM^Zfr2hV9`f}}VVUXK39p@G|xYRz{fTI+Yq z20d=)iwjuG9RB$%$^&8#(c0_j0t_C~^|n+c`Apu|x7~;#cS-s=X1|C*YxX3ailhg_|0`g!E&GZJEr?bh#Tpb8siR=JxWKc{#w7g zWznLwi;zLFmM1g8V5-P#RsM@iX>TK$xsWuujcsVR^7TQ@!+vCD<>Bk9tdCo7Mzgq5 zv8d>dK9x8C@Qoh01u@3h0X_`SZluTb@5o;{4{{eF!-4405x8X7hewZWpz z2qEi4UTiXTvsa(0X7kQH{3VMF>W|6;6iTrrYD2fMggFA&-CBEfSqPlQDxqsa>{e2M z(R5PJ7uOooFc|9GU0ELA%m4&4Ja#cQpNw8i8ACAoK6?-px+oBl_yKmenZut#Xumjz zk8p^OV2KY&?5MUwGrBOo?ki`Sxo#?-Q4gw*Sh0k`@ zFTaYK2;}%Zk-68`#5DXU$2#=%YL#S&MTN8bF+!J2VT6x^XBci6O)Q#JfW{YMz) zOBM>t2rSj)n#0a3cjvu}r|k3od6W(SN}V-cL?bi*Iz-8uOcCcsX0L>ZXjLqk zZu2uHq5B|Kt>e+=pPKu=1P@1r9WLgYFq_TNV1p9pu0erHGd!+bBp!qGi+~4A(RsYN@CyXNrC&hxGmW)u5m35OmWwX`I+0yByglO`}HC4nGE^_HUs^&A(uaM zKPj^=qI{&ayOq#z=p&pnx@@k&I1JI>cttJcu@Ihljt?6p^6{|ds`0MoQwp+I{3l6` zB<9S((RpLG^>=Kic`1LnhpW2=Gu!x`m~=y;A`Qk!-w`IN;S8S930#vBVMv2vCKi}u z6<-VPrU0AnE&vzwV(CFC0gnZYcpa-l5T0ZS$P6(?9AM;`Aj~XDvt;Jua=jIgF=Fm? 
zdp=M$>`phx%+Gu};;-&7T|B1AcC#L4@mW5SV_^1BRbo6;2PWe$r+npRV`yc;T1mo& z+~_?7rA+(Um&o@Tddl zL_hxvWk~a)yY}%j`Y+200D%9$bWHy&;(yj{jpi?Rtz{J66ANw)UyPOm;t6FzY3$hx zcn)Ir79nhFvNa7^a{SHN7XH*|Vlsx`CddPnA&Qvh8aNhEA;mPVv;Ah=k<*u!Zq^7 z<=xs*iQTQOMMcg|(NA_auh@x`3#_LFt=)}%SQppP{E>mu_LgquAWvh<>L7tf9+~rO znwUDS52u)OtY<~!d$;m9+87aO+&`#2ICl@Y>&F{jI=H(K+@3M1$rr=*H^dye#~TyD z!){#Pyfn+|ugUu}G;a~!&&0aqQ59U@UT3|_JuBlYUpT$2+11;}JBJ`{+lQN9T@QFY z5+`t;6(TS0F?OlBTE!@7D`8#URDNqx2t6`GZ{ZgXeS@v%-eJzZOHz18aS|svxII$a zZeFjrJ*$IwX$f-Rzr_G>xbu@euGl)B7pC&S+CmDJBg$BoV~jxSO#>y z33`bupN#LDoW0feZe0%q8un0rYN|eRAnwDHQ6e_)xBTbtoZtTA=Fvk){q}9Os~6mQ zKB80VI_&6iSq`LnK7*kfHZoeX6?WE}8yjuDn=2#JG$+;-TOA1%^=DnXx%w{b=w}tS zQbU3XxtOI8E(!%`64r2`zog;5<0b4i)xBmGP^jiDZ2%HNSxIf3@wKs~uk4%3Mxz;~ zts_S~E4>W+YwI<-*-$U8*^HKDEa8oLbmqGg?3vewnaNg%Mm)W=)lcC_J+1ov^u*N3 zXJ?!BrH-+wGYziJq2Y#vyry6Z>NPgkEk+Ke`^DvNRdb>Q2Nlr#v%O@<5hbflI6EKE z9dWc0-ORk^T}jP!nkJ1imyjdVX@GrjOs%cpgA8-c&FH&$(4od#x6Y&=LiJZPINVyW z0snY$8JW@>tc2}DlrD3StQmA0Twck~@>8dSix9CyQOALcREdxoM$Sw*l!}bXKq9&r zysMWR@%OY24@e`?+#xV2bk{T^C_xSo8v2ZI=lBI*l{RciPwuE>L5@uhz@{!l)rtVlWC>)6(G)1~n=Q|S!{E9~6*fdpa*n z!()-8EpTdj=zr_Lswi;#{TxbtH$8*G=UM`I+icz7sr_SdnHXrv=?iEOF1UL+*6O;% zPw>t^kbW9X@oEXx<97%lBm-9?O_7L!DeD)Me#rwE54t~UBu9VZ zl_I1tBB~>jm@bw0Aljz8! 
zXBB6ATG6iByKIxs!qr%pz%wgqbg(l{65DP4#v(vqhhL{0b#0C8mq`bnqZ1OwFV z7mlZZJFMACm>h9v^2J9+^_zc1=JjL#qM5ZHaThH&n zXPTsR8(+)cj&>Un{6v*z?@VTLr{TmZ@-fY%*o2G}*G}#!bmqpoo*Ay@U!JI^Q@7gj;Kg-HIrLj4}#ec4~D2~X6vo;ghep-@&yOivYP zC19L0D`jjKy1Yi-SGPAn94(768Tcf$urAf{)1)9W58P`6MA{YG%O?|07!g9(b`8PXG1B1Sh0?HQmeJtP0M$O$hI z{5G`&9XzYhh|y@qsF1GnHN|~^ru~HVf#)lOTSrv=S@DyR$UKQk zjdEPFDz{uHM&UM;=mG!xKvp;xAGHOBo~>_=WFTmh$chpC7c`~7?36h)7$fF~Ii}8q zF|YXxH-Z?d+Q+27Rs3X9S&K3N+)OBxMHn1u(vlrUC6ckBY@@jl+mgr#KQUKo#VeFm zFwNYgv0<%~Wn}KeLeD9e1$S>jhOq&(e*I@L<=I5b(?G(zpqI*WBqf|Zge0&aoDUsC zngMRA_Kt0>La+Erl=Uv_J^p(z=!?XHpenzn$%EA`JIq#yYF?JLDMYiPfM(&Csr#f{ zdd+LJL1by?xz|D8+(fgzRs~(N1k9DSyK@LJygwaYX8dZl0W!I&c^K?7)z{2is;OkE zd$VK-(uH#AUaZrp=1z;O*n=b?QJkxu`Xsw&7yrX0?(CX=I-C#T;yi8a<{E~?vr3W> zQrpPqOW2M+AnZ&p{hqmHZU-;Q(7?- zP8L|Q0RM~sB0w1w53f&Kd*y}ofx@c z5Y6B8qGel+uT1JMot$nT1!Tim6{>oZzJXdyA+4euOLME?5Fd_85Uk%#E*ln%y{u8Q z$|?|R@Hpb~yTVK-Yr_S#%NUy7EBfYGAg>b({J|5b+j-PBpPy$Ns`PaJin4JdRfOaS zE|<HjH%NuJgsd2wOlv>~y=np%=2)$M9LS|>P)zJ+Fei5vYo_N~B0XCn+GM76 z)Xz3tg*FRVFgIl9zpESgdpWAavvVViGlU8|UFY{{gVJskg*I!ZjWyk~OW-Td4(mZ6 zB&SQreAAMqwp}rjy`HsG({l2&q5Y52<@AULVAu~rWI$UbFuZs>Sc*x+XI<+ez%$U)|a^unjpiW0l0 zj1!K0(b6$8LOjzRqQ~K&dfbMIE=TF}XFAi)$+h}5SD3lo z%%Qd>p9se=VtQG{kQ;N`sI)G^u|DN#7{aoEd zkksYP%_X$Rq08);-s6o>CGJ<}v`qs%eYf+J%DQ^2k68C%nvikRsN?$ap--f+vCS`K z#&~)f7!N^;sdUXu54gl3L=LN>FB^tuK=y2e#|hWiWUls__n@L|>xH{%8lIJTd5`w? 
zSwZbnS;W~DawT4OwSJVdAylbY+u5S+ZH{4hAi2&}Iv~W(UvHg(1GTZRPz`@{SOqzy z(8g&Dz=$PfRV=6FgxN~zo+G8OoPI&d-thcGVR*_^(R8COTM@bq?fDwY{}WhsQS1AK zF6R1t8!RdFmfocpJ6?9Yv~;WYi~XPgs(|>{5})j!AR!voO7y9&cMPo#80A(`za@t>cx<0;qxM@S*m(jYP)dMXr*?q0E`oL;12}VAep179uEr8c<=D zr5?A*C{eJ`z9Ee;E$8)MECqatHkbHH z&Y+ho0B$31MIB-xm&;xyaFCtg<{m~M-QDbY)fQ>Q*Xibb~8ytxZQ?QMf9!%cV zU0_X1@b4d+Pg#R!`OJ~DOrQz3@cpiGy~XSKjZQQ|^4J1puvwKeScrH8o{bscBsowomu z^f12kTvje`yEI3eEXDHJ6L+O{Jv$HVj%IKb|J{IvD*l6IG8WUgDJ*UGz z3!C%>?=dlfSJ>4U88)V+`U-!9r^@AxJBx8R;)J4Fn@`~k>8>v0M9xp90OJElWP&R5 zM#v*vtT}*Gm1^)Bv!s72T3PB0yVIjJW)H7a)ilkAvoaH?)jjb`MP>2z{%Y?}83 zUIwBKn`-MSg)=?R)1Q0z3b>dHE^)D8LFs}6ASG1|daDly_^lOSy&zIIhm*HXm1?VS=_iacG);_I9c zUQH1>i#*?oPIwBMJkzi_*>HoUe}_4o>2(SHWzqQ=;TyhAHS;Enr7!#8;sdlty&(>d zl%5cjri8`2X^Ds`jnw7>A`X|bl=U8n+3LKLy(1dAu8`g@9=5iw$R0qk)w8Vh_Dt^U zIglK}sn^)W7aB(Q>HvrX=rxB z+*L)3DiqpQ_%~|m=44LcD4-bxO3OO*LPjsh%p(k?&jvLp0py57oMH|*IMa(<|{m1(0S|x)?R-mqJ=I;_YUZA>J z62v*eSK;5w!h8J+6Z2~oyGdZ68waWfy09?4fU&m7%u~zi?YPHPgK6LDwphgaYu%0j zurtw)AYOpYKgHBrkX189mlJ`q)w-f|6>IER{5Lk97%P~a-JyCRFjejW@L>n4vt6#hq;!|m;hNE||LK3nw1{bJOy+eBJjK=QqNjI;Q6;Rp5 z&035pZDUZ#%Oa;&_7x0T<7!RW`#YBOj}F380Bq?MjjEhrvlCATPdkCTTl+2efTX$k zH&0zR1n^`C3ef~^sXzJK-)52(T}uTG%OF8yDhT76L~|^+hZ2hiSM*QA9*D5odI1>& z9kV9jC~twA5MwyOx(lsGD_ggYmztXPD`2=_V|ks_FOx!_J8!zM zTzh^cc+=VNZ&(OdN=y4Juw)@8-85lwf_#VMN!Ed(eQiRiLB2^2e`4dp286h@v@`O%_b)Y~A; zv}r6U?zs&@uD_+(_4bwoy7*uozNvp?bXFoB8?l8yG0qsm1JYzIvB_OH4_2G*IIOwT zVl%HX1562vLVcxM_RG*~w_`FbIc!(T=3>r528#%mwwMK}uEhJ()3MEby zQQjzqjWkwfI~;Fuj(Lj=Ug0y`>~C7`w&wzjK(rPw+Hpd~EvQ-ufQOiB4OMpyUKJhw zqEt~jle9d7S~LI~$6Z->J~QJ{Vdn3!c}g9}*KG^Kzr^(7VI5Gk(mHLL{itj_hG?&K4Ws0+T4gLfi3eu$N=`s36geNC?c zm!~}vG6lx9Uf^5M;bWntF<-{p^bruy~f?sk9 zcETAPQZLoJ8JzMMg<-=ju4keY@SY%Wo?u9Gx=j&dfa6LIAB|IrbORLV1-H==Z1zCM zeZcOYpm5>U2fU7V*h;%n`8 zN95QhfD994={1*<2vKLCNF)feKOGk`R#K~G=;rfq}|)s20&MCa65 zUM?xF5!&e0lF%|U!#rD@I{~OsS_?=;s_MQ_b_s=PuWdC)q|UQ&ea)DMRh5>fpQjXe z%9#*x=7{iRCtBKT#H>#v%>77|{4_slZ)XCY{s3j_r{tdpvb#|r|sbS^dU1x70$eJMU!h{Y7Kd{dl}9&vxQl6Jt1a` 
zHQZrWyY0?!vqf@u-fxU_@+}u(%Wm>0I#KP48tiAPYY!TdW(o|KtVI|EUB9V`CBBNaBLVih7+yMVF|GSoIQD0Jfb{ z!OXq;(>Z?O`1gap(L~bUcp>Lc@Jl-})^=6P%<~~9ywY=$iu8pJ0m*hOPzr~q`23eX zgbs;VOxxENe0UMVeN*>uCn9Gk!4siN-e>x)pIKAbQz!G)TcqIJ0`JBBaX>1-4_XO_-HCS^vr2vjv#7KltDZdyQ{tlWh4$Gm zB>|O1cBDC)yG(sbnc*@w6e%e}r*|IhpXckx&;sQCwGdKH+3oSG-2)Bf#x`@<4ETAr z0My%7RFh6ZLiZ_;X6Mu1YmXx7C$lSZ^}1h;j`EZd6@%JNUe=btBE z%s=Xmo1Ps?8G`}9+6>iaB8bgjUdXT?=trMu|4yLX^m0Dg{m7rpKNJey|EwHI+nN1e zL^>qN%5Fg)dGs4DO~uwIdXImN)QJ*Jhpj7$fq_^`{3fwpztL@WBB}OwQ#Epo-mqMO zsM$UgpFiG&d#)lzEQ{3Q;)&zTw;SzGOah-Dpm{!q7<8*)Ti_;xvV2TYXa}=faXZy? z3y?~GY@kl)>G&EvEijk9y1S`*=zBJSB1iet>0;x1Ai)*`^{pj0JMs)KAM=@UyOGtO z3y0BouW$N&TnwU6!%zS%nIrnANvZF&vB1~P5_d`x-giHuG zPJ;>XkVoghm#kZXRf>qxxEix;2;D1CC~NrbO6NBX!`&_$iXwP~P*c($EVV|669kDO zKoTLZNF4Cskh!Jz5ga9uZ`3o%7Pv`d^;a=cXI|>y;zC3rYPFLQkF*nv(r>SQvD*## z(Vo%^9g`%XwS0t#94zPq;mYGLKu4LU3;txF26?V~A0xZbU4Lmy`)>SoQX^m7fd^*E z+%{R4eN!rIk~K)M&UEzxp9dbY;_I^c} zOc{wlIrN_P(PPqi51k_$>Lt|X6A^|CGYgKAmoI#Li?;Wq%q~q*L7ehZkUrMxW67Jl zhsb~+U?33QS>eqyN{(odAkbopo=Q$Az?L+NZW>j;#~@wCDX?=L5SI|OxI~7!Pli;e zELMFcZtJY3!|=Gr2L4>z8yQ-{To>(f80*#;6`4IAiqUw`=Pg$%C?#1 z_g@hIGerILSU>=P>z{gM|DS91A4cT@PEIB^hSop!uhMo#2G;+tQSpDO_6nOnPWSLU zS;a9m^DFMXR4?*X=}d7l;nXuHk&0|m`NQn%d?8|Ab3A9l9Jh5s120ibWBdB z$5YwsK3;wvp!Kn@)Qae{ef`0#NwlRpQ}k^r>yos_Ne1;xyKLO?4)t_G4eK~wkUS2A&@_;)K0-03XGBzU+5f+uMDxC z(s8!8!RvdC#@`~fx$r)TKdLD6fWEVdEYtV#{ncT-ZMX~eI#UeQ-+H(Z43vVn%Yj9X zLdu9>o%wnWdvzA-#d6Z~vzj-}V3FQ5;axDIZ;i(95IIU=GQ4WuU{tl-{gk!5{l4_d zvvb&uE{%!iFwpymz{wh?bKr1*qzeZb5f6e6m_ozRF&zux2mlK=v_(_s^R6b5lu?_W4W3#<$zeG~Pd)^!4tzhs}-Sx$FJP>)ZGF(hVTH|C3(U zs0PO&*h_ zNA-&qZpTP$$LtIgfiCn07}XDbK#HIXdmv8zdz4TY;ifNIH-0jy(gMSByG2EF~Th#eb_TueZC` zE?3I>UTMpKQ})=C;6p!?G)M6w^u*A57bD?2X`m3X^6;&4%i_m(uGJ3Z5h`nwxM<)H z$I5m?wN>O~8`BGnZ=y^p6;0+%_0K}Dcg|K;+fEi|qoBqvHj(M&aHGqNF48~XqhtU? 
z^ogwBzRlOfpAJ+Rw7IED8lRbTdBdyEK$gPUpUG}j-M42xDj_&qEAQEtbs>D#dRd7Y z<&TpSZ(quQDHiCFn&0xsrz~4`4tz!CdL8m~HxZM_agu@IrBpyeL1Ft}V$HX_ZqDPm z-f89)pjuEzGdq-PRu`b1m+qBGY{zr_>{6Ss>F|xHZlJj9dt5HD$u`1*WZe)qEIuDSR)%z+|n zatVlhQ?$w#XRS7xUrFE;Y8vMGhQS5*T{ZnY=q1P?w5g$OKJ#M&e??tAmPWHMj3xhS ziGxapy?kn@$~2%ZY;M8Bc@%$pkl%Rvj!?o%agBvpQ-Q61n9kznC4ttrRNQ4%GFR5u zyv%Yo9~yxQJWJSfj z?#HY$y=O~F|2pZs22pu|_&Ajd+D(Mt!nPUG{|1nlvP`=R#kKH zO*s$r_%ss5h1YO7k0bHJ2CXN)Yd6CHn~W!R=SqkWe=&nAZu(Q1G!xgcUilM@YVei@2@a`8he z9@pM`)VB*=e7-MWgLlXlc)t;fF&-AwM{E-EX}pViFn0I0CNw2bNEnN2dj!^4(^zS3 zobUm1uQnpqk_4q{pl*n06=TfK_C>UgurKFjRXsK_LEn};=79`TB12tv6KzwSu*-C8 z;=~ohDLZylHQ|Mpx-?yql>|e=vI1Z!epyUpAcDCp4T|*RV&X`Q$0ogNwy6mFALo^@ z9=&(9txO8V@E!@6^(W0{*~CT>+-MA~vnJULBxCTUW>X5>r7*eXYUT0B6+w@lzw%n> z_VjJ<2qf|(d6jYq2(x$(ZDf!yVkfnbvNmb5c|hhZ^2TV_LBz`9w!e_V*W_(MiA7|= z&EeIIkw*+$Xd!)j8<@_<}A5;~A_>3JT*kX^@}cDoLd>Qj<`Se^wdUa(j0dp+Tl8EptwBm{9OGsdFEq zM`!pjf(Lm(`$e3FLOjqA5LnN5o!}z{ zNf}rJuZh@yUtq&ErjHeGzX4(!luV!jB&;FAP|!R_QHYw#^Z1LwTePAKJ6X&IDNO#; z)#I@Xnnzyij~C@UH~X51JCgQeF0&hTXnuoElz#m{heZRexWc0k4<>0+ClX7%0 zEBqCCld1tD9Zwkr4{?Nor19#E5-YKfB8d?qgR82-Ow2^AuNevly2*tHA|sK!ybYkX zm-sLQH72P&{vEAW6+z~O5d0qd=xW~rua~5a?ymYFSD@8&gV)E5@RNNBAj^C99+Z5Z zR@Pq55mbCQbz+Mn$d_CMW<-+?TU960agEk1J<>d>0K=pF19yN))a~4>m^G&tc*xR+yMD*S=yip-q=H zIlredHpsJV8H(32@Zxc@bX6a21dUV95Th--8pE6C&3F>pk=yv$yd6@Haw;$v4+Fcb zRwn{Qo@0`7aPa2LQOP}j9v>sjOo5Kqvn|`FLizX zB+@-u4Lw|jsvz{p^>n8Vo8H2peIqJJnMN}A)q6%$Tmig7eu^}K2 zrh$X?T|ZMsoh{6pdw1G$_T<`Ds-G=jc;qcGdK4{?dN2-XxjDNbb(7pk|3JUVCU4y; z)?LXR>f+AAu)JEiti_Zy#z5{RgsC}R(@jl%9YZ>zu~hKQ*AxbvhC378-I@{~#%Y`Z zy=a=9YpewPIC+gkEUUwtUL7|RU7=!^Aa}Mk^6uxOgRGA#JXjWLsjFUnix|Mau{hDT z7mn*z1m5g`vP(#tjT0Zy4eAY(br&!RiiXE=ZI!{sE1#^#%x^Z7t1U)b<;%Y}Q9=5v z;wpDCEZ@OE36TWT=|gxigT@VaW9BvHS05;_P(#s z8zI4XFQys}q)<`tkX$WnSarn{3e!s}4(J!=Yf>+Y>cP3f;vr63f2{|S^`_pWc)^5_!R z*(x-fuBxL51@xe!lnDBKi}Br$c$BMZ3%f2Sa6kLabiBS{pq*yj;q|k(86x`PiC{p6 z_bxCW{>Q2BA8~Ggz&0jkrcU+-$ANBsOop*ms>34K9lNYil@}jC;?cYP(m^P}nR6FV 
zk(M%48Z&%2Rx$A&FhOEirEhY0(dn;-k(qkTU)sFQ`+-ih+s@A8g?r8Pw+}2;35WYf zi}VO`jS`p(tc)$X$a>-#WXoW!phhatC*$}|rk>|wUU71eUJG^$c6_jwX?iSHM@6__ zvV|6%U*$sSXJu9SX?2%M^kK|}a2QJ8AhF{fuXrHZxXsI~O zGKX45!K7p*MCPEQ=gp?eu&#AW*pR{lhQR##P_*{c_DjMGL|3T3-bSJ(o$|M{ytU}> zAV>wq*uE*qFo9KvnA^@juy{x<-u*#2NvkV={Ly}ysKYB-k`K3@K#^S1Bb$8Y#0L0# z`6IkSG&|Z$ODy|VLS+y5pFJx&8tvPmMd8c9FhCyiU8~k6FwkakUd^(_ml8`rnl>JS zZV){9G*)xBqPz^LDqRwyS6w86#D^~xP4($150M)SOZRe9sn=>V#aG0Iy(_^YcPpIz8QYM-#s+n% z@Jd?xQq?Xk6=<3xSY7XYP$$yd&Spu{A#uafiIfy8gRC`o0nk{ezEDjb=q_qRAlR1d zFq^*9Gn)yTG4b}R{!+3hWQ+u3GT~8nwl2S1lpw`s0X_qpxv)g+JIkVKl${sYf_nV~B>Em>M;RlqGb5WVil(89 zs=ld@|#;dq1*vQGz=7--Br-|l) zZ%Xh@v8>B7P?~}?Cg$q9_={59l%m~O&*a6TKsCMAzG&vD>k2WDzJ6!tc!V)+oxF;h zJH;apM=wO?r_+*#;ulohuP=E>^zon}a$NnlcQ{1$SO*i=jnGVcQa^>QOILc)e6;eNTI>os=eaJ{*^DE+~jc zS}TYeOykDmJ=6O%>m`i*>&pO_S;qMySJIyP=}4E&J%#1zju$RpVAkZbEl+p%?ZP^C z*$$2b4t%a(e+%>a>d_f_<JjxI#J1x;=hPd1zFPx=6T$;;X1TD*2(edZ3f46zaAoW>L53vS_J*N8TMB|n+;LD| zC=GkQPpyDY#Am4l49chDv*gojhRj_?63&&8#doW`INATAo(qY#{q}%nf@eTIXmtU< zdB<7YWfyCmBs|c)cK>1)v&M#!yNj#4d$~pVfDWQc_ke1?fw{T1Nce_b`v|Vp5ig(H zJvRD^+ps46^hLX;=e2!2e;w9y1D@!D$c@Jc&%%%IL=+xzw55&2?darw=9g~>P z9>?Kdc$r?6c$m%x2S$sdpPl>GQZ{rC9mPS63*qjCVa?OIBj!fW zm|g?>CVfGXNjOfcyqImXR_(tXS(F{FcoNzKvG5R$IgGaxC@)i(e+$ME}vPVIhd|mx2IIE+f zM?9opQHIVgBWu)^A|RzXw!^??S!x)SZOwZaJkGjc<_}2l^eSBm!eAJG9T>EC6I_sy z?bxzDIAn&K5*mX)$RQzDA?s)-no-XF(g*yl4%+GBf`##bDXJ==AQk*xmnatI;SsLp zP9XTHq5mmS=iWu~9ES>b%Q=1aMa|ya^vj$@qz9S!ih{T8_PD%Sf_QrNKwgrXw9ldm zHRVR98*{C?_XNpJn{abA!oix_mowRMu^2lV-LPi;0+?-F(>^5#OHX-fPED zCu^l7u3E%STI}c4{J2!)9SUlGP_@!d?5W^QJXOI-Ea`hFMKjR7TluLvzC-ozCPn1`Tpy z!vlv@_Z58ILX6>nDjTp-1LlFMx~-%GA`aJvG$?8*Ihn;mH37eK**rmOEwqegf-Ccx zrIX4;{c~RK>XuTXxYo5kMiWMy)!IC{*DHG@E$hx?RwP@+wuad(P1{@%tRkyJRqD)3 zMHHHZ4boqDn>-=DgR5VlhQTpfVy182Gk;A_S8A1-;U1RR>+$62>(MUx@Nox$vTjHq z%QR=j!6Gdyb5wu7y(YUktwMuW5<@jl?m4cv4BODiT5o8qVdC0MBqGr@-YBIwnpZAY znX9(_uQjP}JJ=!~Ve9#5I~rUnN|P_3D$LqZcvBnywYhjlMSFHm`;u9GPla{5QD7(7*6Tb3Svr8;(nuAd81q$*uq6HC_&~je*Ca7hP4sJp0av{M8480wF 
zxASi7Qv+~@2U%Nu1Ud;s-G4CTVWIPyx!sg&8ZG0Wq zG_}i3C(6_1>q3w!EH7$Kwq8uBp2F2N7}l65mk1p*9v0&+;th=_E-W)E;w}P(j⁢ zv5o9#E7!G0XmdzfsS{efPNi`1b44~SZ4Z8fuX!I}#8g+(wxzQwUT#Xb2(tbY1+EUhGKoT@KEU9Ktl>_0 z%bjDJg;#*gtJZv!-Zs`?^}v5eKmnbjqlvnSzE@_SP|LG_PJ6CYU+6zY6>92%E+ z=j@TZf-iW4(%U{lnYxQA;7Q!b;^brF8n0D>)`q5>|WDDXLrqYU_tKN2>=#@~OE7grMnNh?UOz-O~6 z6%rHy{#h9K0AT+lDC7q4{hw^|q6*Ry;;L%Q@)Ga}$60_q%D)rv(CtS$CQbpq9|y1e zRSrN4;$Jyl{m5bZw`$8TGvb}(LpY{-cQ)fcyJv7l3S52TLXVDsphtv&aPuDk1OzCA z4A^QtC(!11`IsNx_HnSy?>EKpHJWT^wmS~hc^p^zIIh@9f6U@I2 zC=Mve{j2^)mS#U$e{@Q?SO6%LDsXz@SY+=cK_QMmXBIU)j!$ajc-zLx3V60EXJ!qC zi<%2x8Q24YN+&8U@CIlN zrZkcT9yh%LrlGS9`G)KdP(@9Eo-AQz@8GEFWcb7U=a0H^ZVbLmz{+&M7W(nXJ4sN8 zJLR7eeK(K8`2-}j(T7JsO`L!+CvbueT%izanm-^A1Dn{`1Nw`9P?cq;7no+XfC`K(GO9?O^5zNIt4M+M8LM0=7Gz8UA@Z0N+lg+cX)NfazRu z5D)~HA^(u%w^cz+@2@_#S|u>GpB+j4KzQ^&Wcl9f z&hG#bCA(Yk0D&t&aJE^xME^&E-&xGHhXn%}psEIj641H+Nl-}boj;)Zt*t(4wZ5DN z@GXF$bL=&pBq-#vkTkh>7hl%K5|3 z{`Vn9b$iR-SoGENp}bn4;fR3>9sA%X2@1L3aE9yTra;Wb#_`xWwLSLdfu+PAu+o3| zGVnpzPr=ch{uuoHjtw7+_!L_2;knQ!DuDl0R`|%jr+}jFzXtrHIKc323?JO{l&;VF z*L1+}JU7%QJOg|5|Tc|D8fN zJORAg=_vsy{ak|o);@)Yh8Lkcg@$FG3k@ep36BRa^>~UmnRPziS>Z=`Jb2x*Q#`%A zU*i3&Vg?TluO@X0O;r2Jl6LKLUOVhSqg1*qOt^|8*c7 zo(298@+r$k_wQNGHv{|$tW(T8L+4_`FQ{kEW5Jgg{yf7ey4ss_(SNKfz(N9lx&a;< je(UuV8hP?p&}TPdm1I$XmG#(RzlD&B2izSj9sl%y5~4qc diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 01f330a93e8fa..a7a990ab2a89e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=f2b9ed0faf8472cbe469255ae6c86eddb77076c75191741b4a462f33128dd419 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-all.zip +distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip networkTimeout=10000 
validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 01f330a93e8fa..a7a990ab2a89e 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=f2b9ed0faf8472cbe469255ae6c86eddb77076c75191741b4a462f33128dd419 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-all.zip +distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b +distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/settings.gradle b/settings.gradle index 74315c6516653..b3a33e11c4ec4 100644 --- a/settings.gradle +++ b/settings.gradle @@ -49,9 +49,7 @@ List projects = [ 'docs', 'client:rest', 'client:rest-high-level', - 'client:rest-high-level:qa:ssl-enabled', 'client:sniffer', - 'client:transport', 'client:test', 'client:client-benchmark-noop-api-plugin', 'client:benchmark', @@ -90,7 +88,6 @@ List projects = [ 'distribution:tools:geoip-cli', 'distribution:tools:ansi-console', 'server', - 'server:cli', 'test:framework', 'test:fixtures:azure-fixture', 'test:fixtures:gcs-fixture', From 1ea66d77ebe4ef207fe569a13a4644372c9f6d01 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Thu, 30 Nov 2023 16:10:35 +0100 Subject: [PATCH 107/263] ESQL: GEO_POINT and CARTESIAN_POINT type support (#102177) * Initial support for geo_point * Based on the earlier prototype at https://github.com/elastic/elasticsearch/pull/98845 * The spatial types support changes between 8.11 and 8.12, so mixed clusters of these two versions should claim these types as unsupported, while pure 8.12 clusters should claim the latest type support. 
* The spatial types support changes between 8.11 and 8.12, so mixed clusters of these two versions should not run the spatial tests. * The test assumed data arrives in the original ingest order, and started with a LIMIT 10 after which predicates were applied. But in many situations this will not work, as the data is not in that order and the LIMIT 10 will result in different documents. So the solution is to create a query that does not depend on original order. * Fix format=csv output for ESQL geo_point * Fix format=csv|tsv output for ESQL geo_point * Refactor SpatialUtils and abstract away GeoPoint * GeoPoint implements SpatialPoint and the geo-specific knowledge was already in the SpatialUtils class, so we could increase the level of abstraction by using SpatialPoint everywhere. * This is in preparation for supporting CartesianPoint, and in future perhaps other systems. * Moved the SpatialCoordinateTypes back to QL module * For some reason the ESQL QA tests could not load it from ESQL, only from QL. Until we figure out what is going wrong with the classloader, we will return these classes to their previous module. * Support Cartesian points similar to Geo point * Primarily the difference is only that mapping to doc-values puts x as high 4 bytes, and y as low 4 bytes, while geo_point does the opposite. * As long as we only support doc-values for the compute engine, this is the only difference we need to worry about. * Exclude spatial types from sortability (mv_min/max) Even though geo_point and cartesian_point are backed by LONG, making them implicitly sortable, we exclude them from sorting because the sort order is not particularly useful. 
* Code review updates * Update changelog to include cartesian_point --- docs/changelog/102177.yaml | 5 + .../index/mapper/GeoPointFieldMapper.java | 17 +- .../mapper/GeoPointFieldMapperTests.java | 54 +- .../org/elasticsearch/test/ESTestCase.java | 28 + .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 28 +- .../elasticsearch/xpack/esql/CsvAssert.java | 18 +- .../xpack/esql/CsvTestUtils.java | 6 +- .../xpack/esql/CsvTestsDataLoader.java | 8 +- .../src/main/resources/airports.csv | 892 ++++++++++++++++++ .../src/main/resources/airports_web.csv | 850 +++++++++++++++++ .../src/main/resources/mapping-airports.json | 19 + .../main/resources/mapping-airports_web.json | 19 + .../src/main/resources/show.csv-spec | 18 +- .../src/main/resources/spatial.csv-spec | 98 ++ .../ToCartesianPointFromStringEvaluator.java | 126 +++ .../ToGeoPointFromStringEvaluator.java | 126 +++ .../ToStringFromCartesianPointEvaluator.java | 109 +++ .../ToStringFromGeoPointEvaluator.java | 109 +++ .../xpack/esql/action/ColumnInfo.java | 22 + .../xpack/esql/action/EsqlQueryResponse.java | 12 + .../xpack/esql/analysis/Verifier.java | 2 + .../operator/comparison/ComparisonMapper.java | 8 + .../function/EsqlFunctionRegistry.java | 4 + .../scalar/convert/ToCartesianPoint.java | 65 ++ .../function/scalar/convert/ToGeoPoint.java | 65 ++ .../function/scalar/convert/ToLong.java | 4 + .../function/scalar/convert/ToString.java | 32 +- .../function/scalar/multivalue/MvCount.java | 14 +- .../function/scalar/multivalue/MvMax.java | 7 +- .../function/scalar/multivalue/MvMin.java | 7 +- .../xpack/esql/formatter/TextFormat.java | 11 +- .../xpack/esql/formatter/TextFormatter.java | 12 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 6 + .../esql/planner/LocalExecutionPlanner.java | 22 +- .../xpack/esql/type/EsqlDataTypes.java | 12 +- .../esql/action/EsqlQueryResponseTests.java | 4 + .../xpack/esql/analysis/AnalyzerTests.java | 41 +- .../function/AbstractFunctionTestCase.java | 23 +- 
.../expression/function/TestCaseSupplier.java | 56 ++ .../AbstractScalarFunctionTestCase.java | 9 +- .../scalar/convert/ToStringTests.java | 17 + .../AbstractMultivalueFunctionTestCase.java | 105 +++ .../scalar/multivalue/MvCountTests.java | 4 +- .../scalar/multivalue/MvDedupeTests.java | 2 +- .../scalar/multivalue/MvMaxTests.java | 2 +- .../scalar/multivalue/MvMinTests.java | 2 +- .../AbstractBinaryComparisonTestCase.java | 3 +- .../xpack/esql/formatter/TextFormatTests.java | 41 +- .../esql/formatter/TextFormatterTests.java | 16 +- .../xpack/ql/util/SpatialCoordinateTypes.java | 119 +++ .../ql/util/SpatialCoordinateTypesTests.java | 54 ++ .../index/mapper/PointFieldMapper.java | 20 +- .../rest-api-spec/test/esql/30_types.yml | 60 ++ .../test/esql/40_unsupported_types.yml | 264 ++++-- 54 files changed, 3484 insertions(+), 193 deletions(-) create mode 100644 docs/changelog/102177.yaml create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_web.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_web.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java create mode 100644 x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java diff --git a/docs/changelog/102177.yaml b/docs/changelog/102177.yaml new file mode 100644 index 0000000000000..62d7b11b86513 --- /dev/null +++ b/docs/changelog/102177.yaml @@ -0,0 +1,5 @@ +pr: 102177 +summary: "GEO_POINT and CARTESIAN_POINT type support" +area: ES|QL +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 10e24fbeebb87..56b65dbf84c6b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -207,6 +207,7 @@ public FieldMapper build(MapperBuilderContext context) { stored.get(), hasDocValues.get(), geoParser, + nullValue.get(), scriptValues(), meta.get(), metric.get(), @@ -363,6 +364,7 @@ public static class GeoPointFieldType extends AbstractGeometryFieldType scriptValues; private final IndexMode indexMode; @@ -372,12 +374,14 @@ private GeoPointFieldType( boolean stored, boolean hasDocValues, Parser parser, + GeoPoint nullValue, FieldValues scriptValues, Map meta, TimeSeriesParams.MetricType metricType, IndexMode indexMode ) { super(name, indexed, stored, hasDocValues, parser, meta); + this.nullValue = nullValue; this.scriptValues = scriptValues; this.metricType = 
metricType; this.indexMode = indexMode; @@ -385,7 +389,7 @@ private GeoPointFieldType( // only used in test public GeoPointFieldType(String name, TimeSeriesParams.MetricType metricType, IndexMode indexMode) { - this(name, true, false, true, null, null, Collections.emptyMap(), metricType, indexMode); + this(name, true, false, true, null, null, null, Collections.emptyMap(), metricType, indexMode); } // only used in test @@ -478,6 +482,17 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext throw new IllegalStateException("unknown field data type [" + operation.name() + "]"); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return new BlockDocValuesReader.LongsBlockLoader(name()); + } + // TODO: Currently we use longs in the compute engine and render to WKT in ESQL + return new BlockSourceReader.LongsBlockLoader( + valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKT) + ); + } + @Override public Query distanceFeatureQuery(Object origin, String pivot, SearchExecutionContext context) { failIfNotIndexedNorDocValuesFallback(context); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index bd7d34226abc9..cce44504d4f3e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.TreeMap; +import java.util.function.Function; import static org.elasticsearch.geometry.utils.Geohash.stringEncode; import static org.elasticsearch.test.ListMatcher.matchesList; @@ -604,13 +604,17 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) public SyntheticSourceExample example(int maxVals) { 
if (randomBoolean()) { Tuple v = generateValue(); - return new SyntheticSourceExample(v.v1(), decode(encode(v.v2())), this::mapping); + return new SyntheticSourceExample(v.v1(), decode(encode(v.v2())), encode(v.v2()), this::mapping); } List> values = randomList(1, maxVals, this::generateValue); List in = values.stream().map(Tuple::v1).toList(); - List> outList = values.stream().map(t -> encode(t.v2())).sorted().map(this::decode).toList(); + // The results are currently sorted in order of encoded values, so we need to sort the expected values too + List outList = values.stream().map(v -> encode(v.v2())).sorted().map(this::decode).toList(); Object out = outList.size() == 1 ? outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + + List outBlockList = outList.stream().map(this::encode).toList(); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(in, out, outBlock, this::mapping); } private Tuple generateValue() { @@ -627,26 +631,30 @@ private GeoPoint randomGeoPoint() { } private Object randomGeoPointInput(GeoPoint point) { - if (randomBoolean()) { - return Map.of("lat", point.lat(), "lon", point.lon()); - } - List coords = new ArrayList<>(); - coords.add(point.lon()); - coords.add(point.lat()); - if (ignoreZValue) { - coords.add(randomDouble()); - } - return Map.of("coordinates", coords, "type", "point"); + return switch (randomInt(4)) { + case 0 -> Map.of("lat", point.lat(), "lon", point.lon()); + case 1 -> new double[] { point.lon(), point.lat() }; + case 2 -> "POINT( " + point.lon() + " " + point.lat() + " )"; + default -> { + List coords = new ArrayList<>(); + coords.add(point.lon()); + coords.add(point.lat()); + if (ignoreZValue) { + coords.add(randomDouble()); + } + yield Map.of("coordinates", coords, "type", "point"); + } + }; } private long encode(GeoPoint point) { return new LatLonDocValuesField("f", point.lat(), point.lon()).numericValue().longValue(); } - 
private Map decode(long point) { + private GeoPoint decode(long point) { double lat = GeoEncodingUtils.decodeLatitude((int) (point >> 32)); double lon = GeoEncodingUtils.decodeLongitude((int) (point & 0xFFFFFFFF)); - return new TreeMap<>(Map.of("lat", lat, "lon", lon)); + return new GeoPoint(lat, lon); } private void mapping(XContentBuilder b) throws IOException { @@ -689,4 +697,18 @@ public List invalidExample() throws IOException { protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } + + @Override + protected Function loadBlockExpected() { + return v -> asJacksonNumberOutput(((Number) v).longValue()); + } + + protected static Object asJacksonNumberOutput(long l) { + // Cast to int to mimic jackson-core behaviour in NumberOutput.outputLong() + if (l < 0 && l > Integer.MIN_VALUE || l >= 0 && l <= Integer.MAX_VALUE) { + return (int) l; + } else { + return l; + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a597142ae1ed0..f8482a65bd92b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -49,6 +49,8 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -1189,6 +1191,32 @@ public static String randomDateFormatterPattern() { return randomFrom(FormatNames.values()).getName(); } + /** + * Generate a random valid point constrained to geographic ranges (lat, lon ranges). 
+ */ + public static SpatialPoint randomGeoPoint() { + return new GeoPoint(randomDoubleBetween(-90, 90, true), randomDoubleBetween(-180, 180, true)); + } + + /** + * Generate a random valid point constrained to cartesian ranges. + */ + public static SpatialPoint randomCartesianPoint() { + double x = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true); + double y = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true); + return new SpatialPoint() { + @Override + public double getX() { + return x; + } + + @Override + public double getY() { + return y; + } + }; + } + /** * helper to randomly perform on consumer with value */ diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 5397681e231fd..af3a6804f2220 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -129,7 +130,32 @@ protected void assertResults( Logger logger ) { assertMetadata(expected, actualColumns, logger); - assertData(expected, actualValues, testCase.ignoreOrder, logger, value -> value == null ? "null" : value.toString()); + assertData(expected, actualValues, testCase.ignoreOrder, logger, EsqlSpecTestCase::valueToString); + } + + /** + * Unfortunately the GeoPoint.toString method returns the old format, but cannot be changed due to BWC. 
+ * So we need to custom format GeoPoint as well as wrap Lists to ensure this custom conversion applies to multi-value fields + */ + private static String valueToString(Object value) { + if (value == null) { + return "null"; + } else if (value instanceof List list) { + StringBuilder sb = new StringBuilder("["); + for (Object field : list) { + if (sb.length() > 1) { + sb.append(", "); + } + sb.append(valueToString(field)); + } + return sb.append("]").toString(); + } else if (value instanceof SpatialPoint point) { + // TODO: This knowledge should be in GeoPoint or at least that package + // Alternatively we could just change GeoPoint.toString() to use WKT, but that has other side-effects + return "POINT (" + point.getX() + " " + point.getY() + ")"; + } else { + return value.toString(); + } } private Throwable reworkException(Throwable th) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java index 994dd2b99852d..80a88981cf5cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvAssert.java @@ -29,6 +29,8 @@ import static org.elasticsearch.xpack.esql.CsvTestUtils.logMetaData; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @@ -119,7 +121,11 @@ private static void assertMetadata( var block = page.getBlock(column); var blockType = Type.asType(block.elementType()); - if (blockType == 
Type.LONG && (expectedType == Type.DATETIME || expectedType == UNSIGNED_LONG)) { + if (blockType == Type.LONG + && (expectedType == Type.DATETIME + || expectedType == Type.GEO_POINT + || expectedType == Type.CARTESIAN_POINT + || expectedType == UNSIGNED_LONG)) { continue; } if (blockType == Type.KEYWORD && (expectedType == Type.IP || expectedType == Type.VERSION || expectedType == Type.TEXT)) { @@ -195,6 +201,10 @@ public static void assertData( // convert the long from CSV back to its STRING form if (expectedType == Type.DATETIME) { expectedValue = rebuildExpected(expectedValue, Long.class, x -> UTC_DATE_TIME_FORMATTER.formatMillis((long) x)); + } else if (expectedType == Type.GEO_POINT) { + expectedValue = rebuildExpected(expectedValue, Long.class, x -> GEO.longAsPoint((long) x)); + } else if (expectedType == Type.CARTESIAN_POINT) { + expectedValue = rebuildExpected(expectedValue, Long.class, x -> CARTESIAN.longAsPoint((long) x)); } else if (expectedType == Type.IP) { // convert BytesRef-packed IP to String, allowing subsequent comparison with what's expected expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> DocValueFormat.IP.format((BytesRef) x)); @@ -205,7 +215,11 @@ public static void assertData( expectedValue = rebuildExpected(expectedValue, Long.class, x -> unsignedLongAsNumber((long) x)); } } - assertEquals(valueTransformer.apply(expectedValue), valueTransformer.apply(actualValue)); + assertEquals( + "Row[" + row + "] Column[" + column + "]", + valueTransformer.apply(expectedValue), + valueTransformer.apply(actualValue) + ); } var delta = actualRow.size() - expectedRow.size(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 3ccf61b3a15ed..060a137b69b7c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -52,6 +52,8 @@ import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public final class CsvTestUtils { private static final int MAX_WIDTH = 20; @@ -386,7 +388,9 @@ public enum Type { (l, r) -> l instanceof Long maybeIP ? maybeIP.compareTo((Long) r) : l.toString().compareTo(r.toString()), Long.class ), - BOOLEAN(Booleans::parseBoolean, Boolean.class); + BOOLEAN(Booleans::parseBoolean, Boolean.class), + GEO_POINT(x -> x == null ? null : GEO.pointAsLong(GEO.stringAsPoint(x)), Long.class), + CARTESIAN_POINT(x -> x == null ? null : CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(x)), Long.class); private static final Map LOOKUP = new HashMap<>(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index ed53b4bac839b..fd4600e5e64ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -52,6 +52,8 @@ public class CsvTestsDataLoader { private static final TestsDataset APPS = new TestsDataset("apps", "mapping-apps.json", "apps.csv"); private static final TestsDataset LANGUAGES = new TestsDataset("languages", "mapping-languages.json", "languages.csv"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs", "mapping-ul_logs.json", "ul_logs.csv"); + private static final TestsDataset AIRPORTS = 
new TestsDataset("airports", "mapping-airports.json", "airports.csv"); + private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); public static final Map CSV_DATASET_MAP = Map.of( EMPLOYEES.indexName, @@ -63,7 +65,11 @@ public class CsvTestsDataLoader { LANGUAGES.indexName, LANGUAGES, UL_LOGS.indexName, - UL_LOGS + UL_LOGS, + AIRPORTS.indexName, + AIRPORTS, + AIRPORTS_WEB.indexName, + AIRPORTS_WEB ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enricy-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv new file mode 100644 index 0000000000000..8c20e876385a5 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports.csv @@ -0,0 +1,892 @@ +abbrev:keyword,name:text, scalerank:integer,type:keyword, location:geo_point +LUH, Sahnewal, 9, small, POINT(75.9570722403652 30.8503598561702) +SSE, Solapur, 9, mid, POINT(75.9330597710755 17.625415183635) +IXR, Birsa Munda, 9, mid, POINT(85.3235970368767 23.3177245989962) +AWZ, Ahwaz, 9, mid, POINT(48.7471065435931 31.3431585560757) +GWL, Gwalior, 9, [mid,military], POINT(78.2172186546348 26.285487697937) +HOD, Hodeidah Int'l, 9, mid, POINT(42.97109630194 14.7552534413725) +IDR, Devi Ahilyabai Holkar Int'l, 9, mid, POINT(75.8092915005895 22.727749187571) +ISK, Gandhinagar, 9, mid, POINT(73.8105674924689 19.9660205672806) +IXC, Chandigarh Int'l, 9, [major,military], POINT(76.8017261105242 30.6707248949667) +IXU, Aurangabad, 9, mid, POINT(75.3958432922005 19.8672969621082) +LYP, Faisalabad Int'l, 9, [mid,military], POINT(72.9878190922305 31.3627435480862) +OMS, Omsk Tsentralny, 9, mid, POINT(73.3163595376585 54.9576482934059) +OVB, Novosibirsk Tolmachev, 9, mid, POINT(82.6671524525865 55.0095847136264) +OZH, Zaporozhye Int'l, 9, [mid,military], POINT(35.3018728575279 
47.8732635579023) +PKU, Simpang Tiga, 9, mid, POINT(101.446569298441 0.464600872998505) +ROP, Rota Int'l, 9, mid, POINT(145.243980298582 14.1717712971216) +SGC, Surgut, 9, mid, POINT(73.4084964764375 61.3401672194481) +TRZ, Tiruchirappalli, 9, mid, POINT(78.7089578747476 10.7603571306554) +TUK, Turbat Int'l, 9, mid, POINT(63.0279333519181 25.988794590011) +UET, Quetta Int'l, 9, mid, POINT(66.9487311480949 30.249043186181) +ZAH, Zahedan Int'l, 9, mid, POINT(60.900708564915 29.4752941956573) +MLG, Abdul Rachman Saleh, 9, [mid,military], POINT(112.711418617258 -7.92998002840567) +BAX, Barnaul, 9, mid, POINT(83.5504532124038 53.3633850813046) +VIAX, Adampur, 9, [military,mid], POINT(75.7584828456005 31.4329422397715) +VIBY, Bareilly, 9, military, POINT(79.452002687657 28.4218087161144) +OPQS, Dhamial, 9, small, POINT(73.0320498392002 33.5614146278861) +CJJ, Cheongju Int'l, 9, major, POINT(127.495916124681 36.7220227766673) +KWJ, Gwangju, 9, [mid,military], POINT(126.810839481226 35.1400051390198) +TAE, Daegu Int'l, 9, mid, POINT(128.637537699933 35.8999277969087) +USN, Ulsan, 9, mid, POINT(129.355731047528 35.5928957527107) +WIIT, Radin Inten II, 9, mid, POINT(105.176060419161 -5.242566777132) +IXD, Allahabad, 9, military, POINT(81.7317271462187 25.443522027821) +CEK, Chelyabinsk, 9, mid, POINT(61.5122589740201 55.2977919496055) +TNN, Tainan, 8, [military,mid], POINT(120.209733318093 22.950667918347) +RMQ, Taichung, 8, [military,mid], POINT(120.630703547584 24.2666555567115) +RTM, Rotterdam The Hague, 8, mid, POINT(4.43384434962876 51.9491301899382) +VOZ, Voronezh-Chertovitskoye, 8, mid, POINT(39.2254496447973 51.8126171268344) +LPL, Liverpool John Lennon, 8, major, POINT(-2.85862065784938 53.3363751054422) +VTZ, Vishakapatnam, 8, mid, POINT(83.2235216387465 17.7279577384364) +UPG, Sultan Hasanuddin Int'l, 8, major, POINT(119.545691342151 -5.05893689455779) +VAV, Vava'u Int'l, 8, mid, POINT(-173.968093944159 -18.5860058550654) +NCL, Newcastle Int'l, 8, major, 
POINT(-1.71034578407216 55.037084860802) +LCE, Goloson Int'l, 8, mid, POINT(-86.8514685020011 15.7451596659126) +MED, Madinah Int'l, 8, major, POINT(39.6991359560417 24.5442339605661) +YMX, Mirabel Int'l, 8, mid, POINT(-74.0287382984814 45.6832250979267) +PLQ, Palanga Int'l, 8, mid, POINT(21.0974463986251 55.9713426235358) +JAI, Jaipur Int'l, 8, mid, POINT(75.8010104192668 26.8211798100605) +IXW, Sonari, 8, mid, POINT(86.1724662363776 22.8154145110242) +YEI, Yenisehir, 8, mid, POINT(29.54492 40.2555395007473) +ADA, Şakirpaşa, 8, major, POINT(35.2969614268338 36.9852090641795) +ADQ, Kodiak, 8, mid, POINT(-152.485638515235 57.7485921070483) +AMA, Amarillo Int'l, 8, major, POINT(-101.705352772697 35.2184031919398) +ASP, Alice Springs, 8, mid, POINT(133.902918 -23.801968) +ATQ, Raja Sansi Int'l, 8, [mid,military], POINT(74.8071559719824 31.7068220258888) +BBI, Biju Patnaik, 8, mid, POINT(85.8168899544429 20.2526659754734) +BET, Bethel, 8, mid, POINT(-161.83898695944 60.7787379834088) +BGA, Palonegro, 8, mid, POINT(-73.1809207725361 7.12770915402685) +BHM, Birmingham Int'l, 8, major, POINT(-86.7523773615462 33.5618672828058) +BHQ, Broken Hill, 8, mid, POINT(141.470407303097 -31.998996737463) +BIL, Logan Int'l, 8, major, POINT(-108.536929388125 45.8036855715278) +BIS, Bismarck Muni., 8, mid, POINT(-100.757471303717 46.7751066661614) +BJX, Del Bajio Int'l, 8, mid, POINT(-101.478753382467 20.9858871211938) +BNI, Benin, 8, mid, POINT(5.603682560067 6.31716689207818) +BOJ, Bourgas, 8, major, POINT(27.5164093662953 42.5670835487702) +BRE, Bremen, 8, major, POINT(8.7858617703132 53.052287104156) +BRM, Jacinto Lara Int'l, 8, mid, POINT(-69.356102 10.0453) +BRO, Brownsville-South Padre Island Int'l, 8, mid, POINT(-97.431765340232 25.9062743545347) +BRS, Bristol Int'l, 8, major, POINT(-2.71086469134308 51.3862934189148) +BTR, Baton Rouge Metro, 8, major, POINT(-91.1567544048105 30.5326138040586) +BTS, Bratislava-M.R. 
Štefánik, 8, major, POINT(17.1999850022208 48.1698379062535) +BTV, Burlington Int'l, 8, mid, POINT(-73.1550787790668 44.4692066040732) +CAE, Columbia Metro, 8, major, POINT(-81.1093352429377 33.9342054584275) +CCJ, Calicut Int'l, 8, major, POINT(75.950993063051 11.1395520526064) +CCK, Cocos (Keeling) Islands, 8, mid, POINT(96.8287472144207 -12.1851585953293) +CFU, Corfu Int'l (Ioannis Kapodistrias), 8, mid, POINT(19.9147561641662 39.6067554505259) +CGQ, Changchun Longjia Int'l, 8, major, POINT(125.690456812998 43.993011479577) +CHS, Charleston Int'l, 8, [major,military], POINT(-80.0369337438262 32.8845301562965) +CJB, Coimbatore, 8, mid, POINT(77.038893772262 11.0301415125983) +CLJ, Someseni, 8, mid, POINT(23.6869812680486 46.7826626340243) +CMW, Ignacio Agramonte, 8, mid, POINT(-77.8451039935167 21.4247037281961) +CPR, Casper/Natrona County Int'l, 8, major, POINT(-106.464444809692 42.8971900483006) +CRK, Clark Int'l, 8, major, POINT(120.550770223914 15.1876422423888) +CRW, Yeager, 8, [major,military], POINT(-81.5964164667526 38.3705914372865) +CTA, Catania Fontanarossa, 8, major, POINT(15.0674605007053 37.470072800341) +CTM, Chetumal Int'l, 8, mid, POINT(-88.3242600415365 18.506434233376) +CWL, Cardiff, 8, major, POINT(-3.33956876429118 51.3986220911017) +CYB, Gerrard Smith, 8, mid, POINT(-79.879461638003 19.6898653962844) +CZM, Cozumel Int'l, 8, mid, POINT(-86.9304064070436 20.5115543771647) +DAY, James M. 
Cox Dayton Int'l, 8, major, POINT(-84.2204594238102 39.8990402865362) +DBO, Dubbo, 8, mid, POINT(148.569717 -32.218681) +DCA, Washington Nat'l, 8, major, POINT(-77.0433373925631 38.8537162012123) +DGO, Durango Int'l, 8, mid, POINT(-104.533846024964 24.1261948326182) +DNK, Voloskoye, 8, mid, POINT(35.0939060224975 48.3675718021117) +DOK, Donetsk, 8, major, POINT(37.7448085572103 48.0691671285582) +DZO, Santa Bernardina Int'l, 8, mid, POINT(-56.4992636213744 -33.3591084475501) +EDI, Edinburgh Int'l, 8, major, POINT(-3.36428468513554 55.9485540113639) +EIS, Terrance B. Lettsome Int'l, 8, mid, POINT(-64.5371514365794 18.4443618557983) +EKO, Elko Reg., 8, mid, POINT(-115.786479232249 40.8276058815225) +ESE, Ensenada, 8, mid, POINT(-116.595724400418 31.7977139760569) +FAE, Vágar, 8, mid, POINT(-7.2708 62.0625) +FAR, Hector Int'l, 8, [mid,military], POINT(-96.8254561269675 46.9198178811323) +FAT, Fresno Yosemite Int'l, 8, mid, POINT(-119.720001323576 36.7698128373959) +FLG, Flagstaff Pulliam, 8, mid, POINT(-111.674656171675 35.1389116757976) +FRS, Mundo Maya Int'l, 8, mid, POINT(-89.8778404226508 16.9149741642226) +FSD, Sioux Falls Reg., 8, mid, POINT(-96.7313831017541 43.5801934972763) +GEG, Spokane Int'l, 8, major, POINT(-117.536836628585 47.6254781278368) +GGT, Exuma Int'l, 8, mid, POINT(-75.872613085304 23.5638829069259) +GIB, Gibraltar, 8, mid, POINT(-5.34677180033388 36.1512747504173) +GRR, Gerald R. 
Ford Int'l, 8, mid, POINT(-85.529573207274 42.8847776020908) +GSO, Triad Int'l, 8, major, POINT(-79.9364867577484 36.1053781998932) +GTF, Great Falls Int'l, 8, mid, POINT(-111.35668472784 47.482270729263) +GZT, Gaziantep Oğuzeli Int'l, 8, major, POINT(37.47380325219 36.9453633446875) +HBX, Hubli, 8, mid, POINT(75.0863155680281 15.3591833386229) +HDY, Hat Yai Int'l, 8, mid, POINT(100.393751274671 6.93634231940664) +HFE, Hefei-Luogang, 8, mid, POINT(117.304197015888 31.7798576795778) +HRG, Hurghada Int'l, 8, major, POINT(33.8071606414118 27.1804260918186) +HRK, Kharkov Int'l, 8, major, POINT(36.2822010773503 49.9215360631551) +HSV, Huntsville Int'l, 8, major, POINT(-86.7749430563373 34.6483344609319) +IBA, Ibadan, 8, mid, POINT(3.9738133433229 7.36034397269393) +ICT, Kansas City Int'l, 8, major, POINT(-97.4287387683976 37.6529279603903) +ILM, Wilmington Int'l, 8, mid, POINT(-77.9103756560469 34.2667840671996) +ILR, Ilorin Int'l, 8, mid, POINT(4.49484038819934 8.43537651935241) +INL, Falls Int'l, 8, mid, POINT(-93.3980027552794 48.5659930848414) +INV, Inverness, 8, mid, POINT(-4.06359175587141 57.5395002923424) +IPL, Imperial Cty., 8, mid, POINT(-115.57199556658 32.8339586685524) +IXJ, Jammu, 8, mid, POINT(74.8423077638915 32.6810428886225) +IXM, Madurai, 8, mid, POINT(78.0911394937194 9.83718627877566) +JDH, Jodhpur, 8, [major,military], POINT(73.0505491895671 26.2637623458351) +JLR, Jabalpur, 8, mid, POINT(80.0587438885277 23.1845328746465) +JRO, Kilimanjaro Int'l, 8, mid, POINT(37.0651896067748 -3.42444495998178) +KAD, Kaduna, 8, mid, POINT(7.32525347407434 10.6946192862391) +KGA, Kananga, 8, mid, POINT(22.4783332482689 -5.90016656227041) +KMS, Kumasi, 8, mid, POINT(-1.59257526582361 6.71460638750365) +KNA, Viña del Mar, 8, mid, POINT(-71.4806025354969 -32.948391765136) +KNU, Kanpur, 8, mid, POINT(80.3675338772002 26.4388334467042) +KOA, Kona Int'l at Keahole, 8, mid, POINT(-156.040889471058 19.7370991399442) +KOI, Kirkwall, 8, mid, POINT(-2.90137849524508 
58.9544288788303) +KTU, Kota, 8, mid, POINT(75.8504977944552 25.1634187166743) +KYA, Konya, 8, [major,military], POINT(32.5756732669687 37.9839945531695) +LEX, Blue Grass, 8, major, POINT(-84.5982681918786 38.0374273181372) +LIH, Lihue, 8, mid, POINT(-159.349084290522 21.9781243162088) +LIT, Clinton National, 8, major, POINT(-92.2205881319289 34.7284300415179) +LMM, Los Mochis, 8, mid, POINT(-109.082694645261 25.688508826099) +LOV, Venustiano Carranza Int'l, 8, mid, POINT(-101.464960031751 26.9553927160699) +LRD, Laredo Int'l, 8, mid, POINT(-99.4556603976513 27.5436657175825) +LSI, Sumburgh, 8, mid, POINT(-1.28806068838753 59.8766899598999) +LTK, Bassel Al-Assad Int'l, 8, major, POINT(35.9442407096663 35.4073114596744) +LTN, London Luton, 8, major, POINT(-0.376227267397439 51.8802952570969) +LYR, Svalbard Longyear, 8, mid, POINT(15.495229 78.246717) +MBJ, Sangster Int'l, 8, mid, POINT(-77.9183907635752 18.5011549298249) +MDL, Mandalay Int'l, 8, mid, POINT(95.9706535950217 21.7055490680274) +MDW, Chicago Midway Int'l, 8, major, POINT(-87.7421266885612 41.7883492597409) +MEC, Eloy Alfaro Int'l, 8, [mid,military], POINT(-80.6833845995774 -0.949557002112883) +MGM, Montgomery Reg., 8, major, POINT(-86.3903074602686 32.3045879909631) +MHT, Manchester-Boston Reg., 8, major, POINT(-71.4375239091857 42.9279139945886) +DNMA, Maiduguri Int'l, 8, mid, POINT(13.0851390162471 11.8534713188527) +MJM, Mbuji Mayi, 8, mid, POINT(23.5721091989052 -6.12484541348812) +MOT, Minot Int'l, 8, mid, POINT(-101.2913855313 48.2556049212839) +MSO, Missoula Int'l, 8, mid, POINT(-114.083694923651 46.9187604768831) +MXL, Gen R.S. 
Taboada Int'l, 8, mid, POINT(-115.247874047841 32.6285643324607) +MXP, Malpensa, 8, major, POINT(8.71295953502437 45.6274405140381) +NLK, Norfolk Island, 8, mid, POINT(167.943394116205 -29.0351592555275) +NUE, Nurnberg, 8, major, POINT(11.0774179739096 49.4945052170345) +ODS, Odessa Int'l, 8, major, POINT(30.6768308310206 46.4406268759106) +OOL, Gold Coast, 8, mid, POINT(153.512876264303 -28.1665168540202) +ORN, Oran Es Senia, 8, mid, POINT(-0.60679696443112 35.6202747312734) +PAT, Lok Nayak Jaiprakash, 8, mid, POINT(85.0909021314663 25.5944434295605) +PDU, Paysandu, 8, mid, POINT(-58.0685346825257 -32.3614545292723) +PFO, Paphos Int'l, 8, major, POINT(32.4832322064926 34.7134012817335) +PLM, Sultan Mahmud Badaruddin II, 8, mid, POINT(104.699128326762 -2.89999345005997) +PTG, Polokwane Int'l, 8, mid, POINT(29.4533403645644 -23.858986270166) +PUJ, Punta Cana, 8, mid, POINT(-68.3632351074649 18.563039033987) +QRO, Queretaro Int'l, 8, mid, POINT(-100.18735943003 20.622466071278) +RAJ, Rajkot, 8, mid, POINT(70.7799548311565 22.3092816988361) +RIC, Richmond Int'l, 8, major, POINT(-77.333119638113 37.5082899750901) +RJH, Shah Makhdum, 8, mid, POINT(88.6138045704431 24.4448068623035) +ROC, Greater Rochester Int'l, 8, major, POINT(-77.6652445062197 43.1275519826482) +ROK, Rockhampton, 8, mid, POINT(150.478897 -23.378599) +ROV, Rostov-on-Don, 8, mid, POINT(39.8035144445391 47.2551119519754) +RTW, Saratov, 8, mid, POINT(46.035023249891 51.5606456508842) +SAP, Ramón Villeda Morales Int'l, 8, mid, POINT(-87.9272365125409 15.4558630524883) +SBA, Santa Barbara Muni., 8, mid, POINT(-119.8366015808 34.4257312978783) +SCC, Deadhorse, 8, mid, POINT(-148.457855 70.19751) +SFJ, Kangerlussuaq, 8, mid, POINT(-50.694199 67.018097) +SGF, Springfield Reg., 8, major, POINT(-93.3826379012003 37.2421444903024) +SHV, Shreveport Reg., 8, major, POINT(-93.8285222229503 32.4545798866513) +SIP, Simferopol Int'l, 8, major, POINT(33.9960529244537 45.0202173978165) +SIT, Sitka Rocky Gutierrez, 8, 
mid, POINT(-135.365692 57.05349) +SJD, Los Cabos Int'l, 8, major, POINT(-109.717858386909 23.1626574483597) +SLE, McNary Field, 8, major, POINT(-123.007871479404 44.9105138452142) +SLW, Plan de Guadalupe, 8, mid, POINT(-100.932260548587 25.5479976419974) +SNN, Shannon, 8, major, POINT(-8.92242885557686 52.6934537102532) +SON, Santo Pekoa Int'l, 8, mid, POINT(167.220894919375 -15.5055387370858) +SRG, Achmad Yani, 8, mid, POINT(110.378556255666 -6.97873484956982) +SXR, Srinagar, 8, [military,mid], POINT(74.7826243672311 33.9830909431623) +TAP, Tapachula Int'l, 8, mid, POINT(-92.370003 14.7911281338773) +TGD, Podgorica, 8, major, POINT(19.2466868618873 42.3679335195428) +TLH, Tallahassee Reg., 8, major, POINT(-84.3449953984858 30.3955576176938) +TRN, Turin Int'l, 8, major, POINT(7.64416230362133 45.1916600734642) +TYN, Taiyuan Wusu Int'l, 8, major, POINT(112.625891539315 37.7545117791512) +UAK, Narsarsuaq, 8, mid, POINT(-45.4164008923108 61.1625968337328) +UTP, U-Tapao, 8, [military,mid], POINT(101.00020929048 12.6852930912664) +VFA, Victoria Falls, 8, mid, POINT(25.8467677208826 -18.0990155983682) +VGA, Vijaywada, 8, mid, POINT(80.7973080000675 16.528642778235) +VNS, Varanasi, 8, mid, POINT(82.8538741913527 25.4499077329822) +VRA, Juan Gualberto Gomez, 8, major, POINT(-81.4367103850623 23.0395422339631) +VSA, Villahermosa, 8, mid, POINT(-92.8190675836262 17.9930660113111) +YBR, Brandon, 8, mid, POINT(-99.9458959002463 49.9047279410277) +YED, CFB Edmonton, 8, [military,major], POINT(-113.478839054497 53.6749156618668) +YFB, Iqaluit, 8, mid, POINT(-68.5367292441812 63.7511523537807) +YHM, John C. 
Munro Hamilton Int'l, 8, mid, POINT(-79.9264230959967 43.1633605305096) +YMM, Fort McMurray, 8, mid, POINT(-111.223840046617 56.6563171390962) +YNT, Yantai, 8, [major,military], POINT(121.372047417773 37.4077044726924) +YPE, Peace River, 8, mid, POINT(-117.443663208082 56.231924036745) +YQM, Greater Moncton Int'l, 8, mid, POINT(-64.6886696807361 46.1162059639259) +YQY, Sydney/J.A. Douglas McCurdy, 8, mid, POINT(-60.0469372117026 46.1673405890504) +YRB, Resolute Bay, 8, mid, POINT(-94.9708023244006 74.7181860987594) +YSM, Fort Smith, 8, mid, POINT(-111.961059938158 60.0198749602443) +YTH, Thompson, 8, mid, POINT(-97.860733 55.797482) +YTS, Timmins, 8, mid, POINT(-81.372047 48.566158) +YUT, Repulse Bay, 8, mid, POINT(-86.25 66.533302) +YVP, Kuujjuaq, 8, mid, POINT(-68.433342 58.101959) +YWK, Wabush, 8, mid, POINT(-66.873009 52.926071) +YXD, Edmonton City Centre, 8, mid, POINT(-113.522973688581 53.5709436582812) +YXJ, Fort St. John (N. Peace), 8, mid, POINT(-120.736439 56.246035) +YYB, North Bay/Jack Garland, 8, mid, POINT(-79.42491 46.358711) +ZAR, Zaria, 8, mid, POINT(7.68726764310577 11.1352958601071) +SKP, Skopje, 8, mid, POINT(21.6281971858229 41.9564546081544) +VE23, Burnpur, 8, mid, POINT(86.974546776573 23.6312179107764) +VIDX, Hindon Air Force Station, 8, mid, POINT(77.3507888779117 28.7077968601071) +, Sunchon, 8, major, POINT(125.890825057486 39.4119659710565) +EPLL, Łódź Władysław Reymont, 8, mid, POINT(19.4032148744037 51.72720704517) +BXJ, Alma Ata N.W., 8, [mid,military], POINT(76.8782640096648 43.3554190837919) +JMU, Jiamusi Dongjiao, 8, mid, POINT(130.456204704407 46.8430150223379) +MDG, Mudanjiang Hailang, 8, major, POINT(129.58015153222 44.5342936299935) +ULMM, Severomorsk-3 (Murmansk N.E.), 8, [military,major], POINT(33.2903527616285 69.0168711826804) +OSB, Mosul Int'l, 8, mid, POINT(43.145802 36.308601) +, Rostov N., 8, [military,mid], POINT(39.6353996343665 47.2774209202867) +, Rostov S.W., 8, mid, POINT(39.7972215345149 47.1158577255835) +OUL, 
Oulu, 8, mid, POINT(25.3728374704307 64.9287992358849) +BOD, Bordeaux, 8, major, POINT(-0.701793449075243 44.8321108662674) +CEQ, Mandelieu, 8, mid, POINT(6.95431612028937 43.546097987045) +DOL, St Gatien, 8, mid, POINT(0.158653528230218 49.3616609986609) +LIL, Lille-Lesquin, 8, mid, POINT(3.10596499799813 50.5716423929581) +TLS, Toulouse-Blagnac, 8, major, POINT(1.37350918551153 43.6304625661601) +FUK, Fukuoka, 8, major, POINT(130.444189541884 33.5848164332573) +HIW, Hiroshima-Nishi, 8, mid, POINT(132.419372741681 34.3713815628829) +NKM, Nagoya, 8, mid, POINT(136.91962838414 35.2540532052867) +SDJ, Sendai, 8, mid, POINT(140.930247381369 38.1382075615287) +KKN, Kirkenes Hoybuktmoen, 8, mid, POINT(29.8913489500406 69.7238318113692) +CGB, Marechal Rondon Int'l, 8, mid, POINT(-56.1201774754724 -15.6511470191955) +FLN, Hercilio Luz Int'l, 8, major, POINT(-48.5448122049599 -27.6646276941638) +JOI, Joinville-Lauro C. de Loyola, 8, mid, POINT(-48.8016498165616 -26.2242941374785) +JPA, Presidente Castro Pinto Int'l, 8, mid, POINT(-34.9488925911125 -7.14617462402047) +NAT, Augusto Severo Int'l, 8, major, POINT(-35.2488410165389 -5.89912054477116) +OPO, Francisco Sa Carneiro, 8, major, POINT(-8.67127240719647 41.2368708920452) +SLZ, Marechal Cunha Machado Int'l, 8, mid, POINT(-44.2362344700492 -2.58350921043019) +SSZ, Santos Air Force Base, 8, [military,mid], POINT(-46.3052704931003 -23.9237590410637) +THE, Teresina-Senador Petronio Portella, 8, mid, POINT(-42.8212402317845 -5.06346299167191) +VCP, Viracopos-Campinas Int'l, 8, mid, POINT(-47.1410791911014 -23.0096239085339) +VIX, Eurico de Aguiar Salles, 8, mid, POINT(-40.2885368759913 -20.2574162759418) +ALC, Alicante, 8, major, POINT(-0.557230440363588 38.2866408993929) +LEI, Almeria, 8, mid, POINT(-2.3716014405912 36.8477672709643) +VLC, Valencia, 8, mid, POINT(-0.473474930771676 39.4914597884489) +KRN, Kiruna_Airport, 8, mid, POINT(20.3351522954898 67.8256066056432) +NRK, Norrköping Airport, 8, major, 
POINT(16.2339407695814 58.5833805017541) +BDO, Husein Sastranegara Int'l, 8, mid, POINT(107.575611852209 -6.90042408353409) +ROS, Rosario – Islas Malvinas Int'l, 8, mid, POINT(-60.7800787216586 -32.9162269743812) +MCZ, Maceio/Zumbi dos Palmares Int'l, 8, mid, POINT(-35.7924951215833 -9.51494118540116) +SSH, Sharm el-Sheikh Int'l, 8, mid, POINT(34.3901189267288 27.9804044199168) +TCP, Taba Int'l, 8, mid, POINT(34.7758378996779 29.5944990568019) +AGR, Agra, 8, [major,military], POINT(77.960909176509 27.15772773475) +BDQ, Vadodara, 8, mid, POINT(73.2262889533239 22.3361640021171) +KSH, Shahid Ashrafi Esfahani, 8, mid, POINT(47.1565835165639 34.3464167739108) +BEN, Benina Int'l, 8, mid, POINT(20.2680398018516 32.0872774606553) +DHA, King Abdulaziz AB, 8, [military,major], POINT(50.1477245727844 26.2703680854768) +STY, Nueva Hespérides Int'l, 8, mid, POINT(-57.9840821176492 -31.4373883387798) +BAIK, Baikonur Cosmodrome, 8, spaceport, POINT(63.307354423875 45.9635739403124) +KSC, Kennedy Space Center, 8, spaceport, POINT(-80.6369680911892 28.5163704772027) +CSG, Centre Spatial Guyanais, 8, spaceport, POINT(-52.7684296893452 5.23941001258035) +AUA, Queen Beatrix Int'l, 7, mid, POINT(-70.0076228563496 12.5034643630297) +JIB, Djibouti-Ambouli Int'l, 7, mid, POINT(43.1497127859956 11.5521018230172) +IQQ, Diego Aracena Int'l, 7, [mid,military], POINT(-70.178635395533 -20.5478400878309) +SAW, Sabiha Gökçen Havaalani, 7, major, POINT(29.3095991423889 40.9043003553957) +KSA, Kosrae Island, 7, mid, POINT(162.957041225076 5.3520098571828) +FUN, Funafuti Int'l, 7, mid, POINT(179.19544202302 -8.52485415059424) +NAG, Dr. Babasaheb Ambedkar Int'l, 7, mid, POINT(79.0537976421986 21.0899317630087) +HKT, Phuket Int'l, 7, mid, POINT(98.3060384900559 8.10768475952735) +NAN, Nadi Int'l, 7, mid, POINT(177.451151198059 -17.7529129479792) +AGU, Lic. 
Jesús Terán Peredo Int'l, 7, mid, POINT(-102.314093740058 21.7013390329207) +ALL, Albenga, 7, mid, POINT(8.12314535436409 44.0458773598158) +AMM, Queen Alia Int'l, 7, major, POINT(35.989707162193 31.7226621600432) +ARI, Chacalluta Int'l, 7, mid, POINT(-70.3357301410959 -18.3492061639579) +ATR, Atar Int'l, 7, mid, POINT(-13.0511704323315 20.4982706101565) +BAQ, Ernesto Cortissoz Int'l, 7, mid, POINT(-74.776555978265 10.8866775959414) +BRC, Teniente Luis Candelaria Int'l, 7, mid, POINT(-71.1614300869763 -41.1459976958105) +BYK, Bouaké, 7, mid, POINT(-5.06894222275311 7.73610495555032) +BZE, Philip S. W. Goldson Int'l, 7, major, POINT(-88.3082064033075 17.5360686575521) +CRP, Corpus Christi Int'l, 7, major, POINT(-97.5022678710298 27.7744560700823) +CUR, Hato Int'l, 7, mid, POINT(-68.9568788072761 12.1848346052019) +CUZ, Velazco Astete Int'l, 7, major, POINT(-71.9436641449722 -13.5382186992639) +DAR, Julius Nyerere Int'l, 7, mid, POINT(39.2074715039165 -6.86672004249119) +DET, Detroit City, 7, mid, POINT(-83.0039681417733 42.4090938431907) +DIL, Presidente Nicolau Lobato Int'l, 7, mid, POINT(125.524854209182 -8.54931157414564) +DME, Moscow Domodedovo Int'l, 7, major, POINT(37.9002531289452 55.4141528223023) +DUD, Dunedin Int'l, 7, mid, POINT(170.200027 -45.923431) +DZA, Dzaoudzi Pamanzi Int'l, 7, mid, POINT(45.2817864197899 -12.8049474381643) +ELP, El Paso Int'l, 7, mid, POINT(-106.395714679366 31.7990860272589) +EVN, Zvartnots Int'l, 7, major, POINT(44.4000630536938 40.1523679451884) +FTW, Fort Worth Meacham Field, 7, major, POINT(-97.3551348561587 32.8207529047972) +GDT, JAGS McCartney Int'l, 7, mid, POINT(-71.1461337448876 21.4421237439063) +GLS, Scholes Int'l, 7, mid, POINT(-94.8554013876264 29.2671239212096) +GOM, Goma Int'l, 7, mid, POINT(29.2400534952228 -1.6583179500207) +GOU, Garoua Int'l, 7, mid, POINT(13.3724309377878 9.33068867678854) +GUM, Antonio B. 
Won Pat Int'l, 7, major, POINT(144.805850357093 13.4926462359465) +GYY, Gary/Chicago Int'l, 7, mid, POINT(-87.4083596247406 41.6177930015166) +HAH, Prince Said Ibrahim Int'l, 7, mid, POINT(43.2745612179616 -11.5366393829127) +HBA, Hobart Int'l, 7, mid, POINT(147.505996190408 -42.8376083694822) +HIR, Honiara Int'l, 7, mid, POINT(160.045855129925 -9.42757566400146) +IEV, Kiev Zhuliany Int'l, 7, mid, POINT(30.4451305182104 50.412808165985) +IKT, Irkutsk S.E., 7, [mid,military], POINT(104.355859748002 52.2728893882244) +IND, Indianapolis Int'l, 7, major, POINT(-86.2734003650885 39.7302043703969) +INU, Nauru Int'l, 7, mid, POINT(166.91613965882 -0.545037226856384) +IPC, Mataveri Int'l, 7, mid, POINT(-109.43006441001 -27.1587738388538) +JUJ, Gob. Horacio Guzman Int'l, 7, mid, POINT(-65.0937665458812 -24.3861010775846) +KHN, Nanchang Changbei Int'l, 7, mid, POINT(115.911979918602 28.8624891200666) +KMG, Kunming Wujiaba Int'l, 7, major, POINT(102.742117578823 24.999996110081) +LBA, Leeds Bradford, 7, major, POINT(-1.65983106734746 53.8690819474434) +LBV, Libreville Leon M'ba Int'l, 7, mid, POINT(9.41022337820712 0.457139229503759) +LFW, Lomé Tokoin, 7, mid, POINT(1.25093205640014 6.16687362722297) +LWO, Lviv Danylo Halytskyi Int'l, 7, [mid,military], POINT(23.9461269598944 49.8178506050005) +MAJ, Marshall Islands Int'l, 7, mid, POINT(171.281919370648 7.06811848557091) +MFM, Macau Int'l, 7, major, POINT(113.57451294862 22.1576572529634) +MGQ, Aden Adde Int'l, 7, mid, POINT(45.3036374186202 2.01635311214988) +MPM, Maputo Int'l, 7, mid, POINT(32.5741915194782 -25.924276711787) +MRU, Sir Seewoosagur Ramgoolam Int'l, 7, mid, POINT(57.6769860076636 -20.4317567793216) +NAP, Naples Int'l, 7, major, POINT(14.2828444340203 40.8780728843639) +NDB, Nouadhibou Int'l, 7, mid, POINT(-17.0334398691538 20.9290523064387) +NGB, Ningbo Lishe Int'l, 7, major, POINT(121.461819388484 29.8208231906861) +NKC, Nouakchott Int'l, 7, mid, POINT(-15.9519259252201 18.0979231718174) +NOU, La Tontouta 
Int'l, 7, mid, POINT(166.217232118699 -22.0136386248981) +OAK, Oakland Int'l, 7, major, POINT(-122.213261257863 37.7123036951691) +ONT, Ontario Int'l, 7, major, POINT(-117.592327651651 34.060191102066) +ORK, Cork, 7, major, POINT(-8.49014199983817 51.8485405419923) +PDG, Minangkabau Int'l, 7, mid, POINT(100.285455851791 -0.786045714026273) +PDL, João Paulo II, 7, mid, POINT(-25.6969882198711 37.7433316472933) +PEW, Bacha Khan Int'l, 7, mid, POINT(71.5188149912667 33.9914027889596) +PIK, Glasgow Prestwick, 7, mid, POINT(-4.61097163901068 55.5088918105142) +PMG, Ponta Porã Int'l, 7, mid, POINT(-55.7060793748573 -22.551786560876) +PMR, Palmerston N. Int'l, 7, mid, POINT(175.62128328196 -40.3233178852055) +PNI, Pohnpei Int'l, 7, mid, POINT(158.203304490964 6.98130676512123) +PPT, Tahiti Faa'a Int'l, 7, mid, POINT(-149.609757932429 -17.5594577659942) +PSA, Pisa Galileo Galilei Int'l, 7, [major,military], POINT(10.4001343718056 43.6983224157664) +PZU, Port Sudan, 7, [mid,military], POINT(37.216065757542 19.5760636531968) +RAI, Praia Int'l, 7, mid, POINT(-23.4862019883587 14.9449889352832) +RAK, Marrakech-Menara, 7, mid, POINT(-8.02460535907989 31.6022946597764) +RAR, Rarotonga Int'l, 7, mid, POINT(-159.798156308387 -21.2009821724632) +REP, Siem Reap Int'l, 7, major, POINT(103.815780528112 13.4087969693538) +RGA, Hermes Quijada Int'l, 7, mid, POINT(-67.7530268462675 -53.7814746058316) +RGL, Piloto Civil Norberto Fernandez Int'l, 7, mid, POINT(-69.3064711776731 -51.6116980855402) +RNO, Reno-Tahoe Int'l, 7, major, POINT(-119.775283308105 39.5058499014703) +ROR, Roman Tmetuchl Int'l, 7, mid, POINT(134.532953466159 7.3644955361292) +SID, Amilcar Cabral Int'l, 7, mid, POINT(-22.9440574079648 16.7347932693385) +SJJ, Sarajevo, 7, major, POINT(18.3366185457127 43.8258872246797) +SKB, Robert L. 
Bradshaw Int'l, 7, mid, POINT(-62.7142125047316 17.311125840442) +SLA, Martín Miguel de Güemes Int, 7, mid, POINT(-65.4784760437796 -24.8443742713315) +SPN, Saipan Int'l, 7, mid, POINT(145.723694658638 15.1215167197664) +SRE, Juana Azurduy de Padilla Int'l, 7, mid, POINT(-65.2928631387847 -19.0139157924657) +SXM, Princess Juliana Int'l, 7, major, POINT(-63.1122760858602 18.042244021474) +TAI, Ta'izz Int'l, 7, mid, POINT(44.134782731062 13.6854970025574) +TAO, Qingdao Liuting Int'l, 7, mid, POINT(120.380685949061 36.2677578081039) +TKK, Chuuk Int'l, 7, mid, POINT(151.842046037403 7.45761780288443) +TNG, Tangier Ibn Battouta, 7, mid, POINT(-5.91288087655914 35.7257656409274) +TRW, Bonriki Int'l, 7, mid, POINT(173.145990795301 1.3806686975383) +TSE, Astana Int'l, 7, major, POINT(71.4609441399936 51.0269352907712) +TSN, Tianjin Binhai Int'l, 7, major, POINT(117.352723159919 39.1294609909008) +TSV, Townsville, 7, [major,military], POINT(146.77067890477 -19.2561814376212) +TUC, Teniente Gen. Benjamin Matienzo Int'l, 7, mid, POINT(-65.1081246236248 -26.8357310050714) +TUN, Aeroport Tunis, 7, major, POINT(10.2176992447111 36.8474482177219) +TUS, Tucson Int'l, 7, major, POINT(-110.937713232132 32.1203523441898) +ULN, Chinggis Khaan Int'l, 7, mid, POINT(106.762873994929 47.8525260966684) +URC, Ürümqi Diwopu Int'l, 7, major, POINT(87.4671298487808 43.8983382193653) +VLI, Bauerfield Int'l, 7, mid, POINT(168.319622739662 -17.7016990681781) +WWK, Wewak Int'l, 7, mid, POINT(143.669102299698 -3.58022689444744) +XCR, Châlons Vatry, 7, [military,mid], POINT(4.19111982574289 48.7803946138566) +XMN, Xiamen Gaoqi Int'l, 7, major, POINT(118.12696884672 24.537192570557) +YAP, Yap Int'l, 7, mid, POINT(138.086430283619 9.49791733361348) +ZLO, Playa de Oro Int'l, 7, mid, POINT(-104.560095200097 19.1480860285854) +CAY, Cayenne – Rochambeau, 7, mid, POINT(-52.3638068572357 4.82126714308924) +UIII, Irkutsk N.W., 7, mid, POINT(104.197359284494 52.3616476700131) +SJW, Shijiazhuang Zhengding 
Int'l, 7, major, POINT(114.692266598902 38.278140913112) +GYD, Heydar Aliyev Int'l, 7, major, POINT(50.0498394867405 40.462746883908) +LAK, Lakatamia Airbase, 7, [military,mid], POINT(33.322201334899 35.1063448067362) +CFB, Cabo Frio Int'l, 7, mid, POINT(-42.0792517520184 -22.9256317091328) +HEM, Helsinki-Malmi, 7, mid, POINT(25.0455353698315 60.2493778499587) +LUX, Luxembourg-Findel, 7, major, POINT(6.21642121728731 49.6343040925102) +VCE, Venice Marco Polo, 7, major, POINT(12.3410673004369 45.5048477588455) +YNY, Yangyang Int'l, 7, mid, POINT(128.66298866884 38.0587824162585) +TBT, Tabatinga Int'l, 7, mid, POINT(-69.939473933909 -4.25032469493379) +BVB, Boa Vista Int'l, 7, mid, POINT(-60.6922206338682 2.84119534121157) +LPA, Gran Canaria, 7, major, POINT(-15.3899245158461 27.9368899716574) +ING, Com. Armando Tola Int'l, 7, mid, POINT(-72.0538569101296 -50.2839008690038) +NYO, Stockholm-Skavsta, 7, mid, POINT(16.9216055584254 58.7851041303448) +MES, Polonia Int'l, 7, mid, POINT(98.6761925714641 3.56659179990894) +BGF, Bangui M'Poko Int'l, 7, mid, POINT(18.524123630208 4.39885153695957) +HGH, Hangzhou Xiaoshan Int'l, 7, major, POINT(120.432097376313 30.2351862790414) +CXI, Cassidy Int'l, 7, mid, POINT(-157.34977789343 1.98616119792402) +SQQ, Šiauliai Int'l, 7, mid, POINT(23.3831885738691 55.90376945404) +IUE, Niue Int'l, 7, mid, POINT(-169.926129774217 -19.0767129354511) +AGT, Guaraní Int'l, 7, mid, POINT(-54.8393995296062 -25.4568570715812) +AQP, Rodríguez Ballón Int'l, 7, mid, POINT(-71.5679335385285 -16.344552065352) +VVO, Vladivostok Int'l, 7, [mid,military], POINT(132.139841720715 43.3776492533885) +PRN, Pristina, 7, major, POINT(21.0302690124746 42.5850331153448) +ANR, Deurne, 6, mid, POINT(4.45092277399909 51.1891285063806) +LAP, Gen. 
Márquez de León Int'l, 6, mid, POINT(-110.367197859809 24.0760903521803) +HRB, Harbin Taiping, 6, major, POINT(126.236983030863 45.6206011723245) +TRV, Trivandrum Int'l, 6, mid, POINT(76.9189025612913 8.47650993894514) +ADB, Adnan Menderes, 6, major, POINT(27.1492975952664 38.2912347645175) +NKG, Nanjing Lukou Int'l, 6, major, POINT(118.866102146906 31.7353249296177) +FPO, Freeport Int'l, 6, mid, POINT(-78.7039343114497 26.548246747189) +TIP, Tripoli Int'l, 6, major, POINT(13.1442589810713 32.6691695504993) +YQX, Gander Int'l, 6, mid, POINT(-54.5755719093578 48.9465980060736) +DOH, Doha Int'l, 6, [major,military], POINT(51.5585487876547 25.2682461310506) +ABQ, Albuquerque Int'l, 6, major, POINT(-106.6166851616 35.0491578018276) +ANU, V.C. Bird Int'l, 6, mid, POINT(-61.7923676698358 17.1403599371617) +APW, Faleolo, 6, mid, POINT(-171.99732221834 -13.8325013323956) +ATZ, Asyut, 6, mid, POINT(31.0162490438011 27.0508158406978) +BAH, Bahrain Int'l, 6, major, POINT(50.6260028757534 26.2696971499497) +BDL, Bradley Int'l, 6, major, POINT(-72.685394743339 41.9303160058352) +BGI, Grantley Adams Int'l, 6, mid, POINT(-59.4874188953158 13.079661104553) +BJL, Yundum Int'l, 6, mid, POINT(-16.6523132698075 13.3438604788942) +BJM, Bujumbura Int'l, 6, mid, POINT(29.3209840169939 -3.32204434913113) +BLZ, Chileka Int'l, 6, mid, POINT(34.9719441837933 -15.6813844793272) +BME, Broome Int'l, 6, mid, POINT(122.233850515022 -17.952576129268) +BND, Bandar Abbass Int'l, 6, mid, POINT(56.368886456411 27.2103258455145) +BSR, Basrah Int'l, 6, major, POINT(47.6683766633518 30.552799016106) +CJS, Ciudad Juarez Int'l, 6, mid, POINT(-106.435846631055 31.6357566201951) +CMB, Katunayake Int'l, 6, major, POINT(79.8852573421506 7.17807710544221) +CNS, Cairns Int'l, 6, mid, POINT(145.7535848444 -16.8767421554062) +CNX, Chiang Mai Int'l, 6, major, POINT(98.9681181241593 18.7688473919675) +COS, City of Colorado Springs, 6, major, POINT(-104.700880274111 38.7974248779125) +CPE, Ign. 
Alberto Ongay Int'l, 6, mid, POINT(-90.5036283734038 19.8142247992074) +CSX, Changsha Huanghua Int'l, 6, major, POINT(113.214054203252 28.1899218619451) +CVG, Greater Cincinnati Int'l, 6, major, POINT(-84.6561699153392 39.055418904783) +DAD, Da Nang, 6, major, POINT(108.202706257936 16.053144145167) +DAL, Dallas Love Field, 6, major, POINT(-96.84986377098 32.8444253732738) +DAM, Damascus Int'l, 6, major, POINT(36.5128954718126 33.4114366702732) +DAV, Enrique Malek Int'l, 6, mid, POINT(-82.4317583369387 8.39126106116917) +DIR, Aba Tenna D. Yilma Int'l, 6, mid, POINT(41.857756722253 9.61267784753569) +DPS, Bali Int'l, 6, major, POINT(115.162322961107 -8.74475731595652) +DSM, Des Moines Int'l, 6, major, POINT(-93.6484612563736 41.5327904242113) +EBB, Entebbe Int'l, 6, mid, POINT(32.4427573135214 0.044940949388672) +FKI, Kisangani Bangoka Int'l, 6, mid, POINT(25.3302714896212 0.492225136917501) +FOC, Fuzhou Changle Int'l, 6, mid, POINT(119.668043820999 25.9318233148143) +GAU, Lokpriya G. Bordoloi Int'l, 6, mid, POINT(91.588229058187 26.1052475924255) +GDN, Gdansk Lech Walesa, 6, major, POINT(18.4684422165911 54.3807025352925) +GND, Point Salines Int'l, 6, mid, POINT(-61.7858529909285 12.0072683054283) +GOJ, Nizhny Novgorod Int'l, 6, mid, POINT(43.7896337062935 56.2185525910656) +GYM, Gen. José M. Yáñez Int'l, 6, mid, POINT(-110.921651270402 27.9694553962829) +HET, Hohhot Baita Int'l, 6, mid, POINT(111.814681821626 40.8540600906552) +HLN, Helena Reg., 6, mid, POINT(-111.989896896008 46.6102043529) +HMO, Gen. Ignacio P. 
Garcia Int'l, 6, mid, POINT(-111.051901711819 29.0900772523445) +IAD, Dulles Int'l, 6, major, POINT(-77.4477925769206 38.952774037953) +ITO, Hilo Int'l, 6, mid, POINT(-155.039629733435 19.7147976868663) +JAN, Jackson Int'l, 6, major, POINT(-90.0750986276924 32.3100600273635) +JAX, Jacksonville Int'l, 6, major, POINT(-81.6835767278311 30.491352730948) +KCH, Kuching Int'l, 6, mid, POINT(110.341837054315 1.4872079377901) +KGL, Kigali Int'l, 6, mid, POINT(30.1348768187856 -1.96365443664138) +KRK, Kraków-Balice, 6, major, POINT(19.8009772844504 50.0722630648331) +KUF, Kurumoch, 6, major, POINT(50.1472655210191 53.5083848190935) +KWL, Guilin Liangjiang Int'l, 6, major, POINT(110.04689349777 25.2176055252293) +LAO, Laoag Int'l, 6, mid, POINT(120.533876196127 18.1824180866379) +LGA, LaGuardia, 6, major, POINT(-73.8719858204814 40.7745539398858) +LGW, London Gatwick, 6, major, POINT(-0.162961639139456 51.1557567519275) +LJU, Ljubljana, 6, major, POINT(14.4548126283266 46.2305445554486) +LKO, Amausi Int'l, 6, mid, POINT(80.8841719732472 26.7639328700916) +LPG, La Plata, 6, mid, POINT(-57.895382063651 -34.9655441559234) +MAM, Gen. 
Sevando Canales, 6, mid, POINT(-97.5308217121187 25.7708412640619) +MAN, Manchester Int'l, 6, major, POINT(-2.27337159069427 53.3624896066518) +MCI, Kansas City Int'l, 6, major, POINT(-94.7159148579154 39.2978958263659) +MCT, Seeb Int'l, 6, major, POINT(58.2904804753493 23.5885704175856) +MIR, Habib Bourguiba Int'l, 6, mid, POINT(10.753368185054 35.760710442178) +MRS, Marseille Provence Airport, 6, major, POINT(5.22137917720337 43.4410600016468) +NLD, Quetzalcoatl Int'l, 6, mid, POINT(-99.5680081930063 27.4496896508316) +NNG, Nanning Wuwu Int'l, 6, major, POINT(108.168012273331 22.6120370541785) +OAX, Xoxocotlán Int'l, 6, mid, POINT(-96.7217959384975 17.0005592569745) +OGG, Kahului, 6, mid, POINT(-156.437429581353 20.8932885151112) +OKC, Will Rogers, 6, major, POINT(-97.5961177542092 35.3952774911744) +ORF, Norfolk Int'l, 6, major, POINT(-76.2044231712327 36.8982394673674) +PBI, Palm Beach Int'l, 6, major, POINT(-80.0901893383387 26.688441666433) +PBM, Pengel Int'l, 6, mid, POINT(-55.1999113892902 5.45599967797439) +PEE, Bolshesavino, 6, mid, POINT(56.0195602820297 57.9197711231691) +PEN, Penang Int'l, 6, mid, POINT(100.265786380955 5.29265627790489) +PHC, Port Harcourt Int'l, 6, mid, POINT(6.94989742723191 5.00700347673943) +PHE, Port Hedland Int'l, 6, mid, POINT(118.631797815615 -20.3781272960723) +PIR, Pierre Regional, 6, mid, POINT(-100.292641981705 44.3801534668762) +PIT, Greater Pittsburgh Int'l, 6, major, POINT(-80.2561290571918 40.4960518915285) +PPG, Pago Pago Int'l, 6, mid, POINT(-170.713307053734 -14.3290641850306) +BHX, Birmingham Int'l, 6, major, POINT(-1.73373170434452 52.4529085542838) +ROB, Roberts Int'l, 6, mid, POINT(-10.3530851867934 6.24183456554525) +RPR, Raipur, 6, mid, POINT(81.7403775915201 21.1859868561447) +SAL, El Salvador Int'l, 6, mid, POINT(-89.0572035692743 13.4447481228616) +SAN, San Diego Int'l, 6, major, POINT(-117.197511025731 32.7322645570132) +SAT, San Antonio Int'l, 6, major, POINT(-98.4719699991559 29.5266203391315) +SAV, 
Savannah Int'l, 6, major, POINT(-81.2099647750913 32.1356415522902) +SCU, Antonio Maceo, 6, mid, POINT(-75.8398877639791 19.9724288717622) +SLP, Ponciano Arriaga Int'l, 6, mid, POINT(-100.936477816267 22.2557130495903) +SMF, Sacramento Int'l, 6, major, POINT(-121.587894877723 38.6927238925554) +STI, Cibao Int'l, 6, mid, POINT(-70.6941783224468 19.4659219152888) +SVX, Koltsovo, 6, major, POINT(60.8058033432174 56.732245612046) +SYR, Syracuse Hancock Int'l, 6, major, POINT(-76.1130789991049 43.1317844943741) +TBZ, Tabriz, 6, mid, POINT(46.244713373574 38.1311107688175) +TRC, Torreon Int'l, 6, mid, POINT(-103.398787828579 25.5632164399896) +TUL, Tulsa Int'l, 6, major, POINT(-95.889882271542 36.190127565195) +TYS, Mcghee Tyson, 6, major, POINT(-83.9899378327585 35.8057448027088) +UFA, Ufa Int'l, 6, major, POINT(55.8840773411837 54.5651323578972) +UVF, Hewanorra Int'l, 6, mid, POINT(-60.9499737723461 13.7365238050489) +WDH, Windhoek Hosea Kutako Int'l, 6, mid, POINT(17.4632259028133 -22.4869531202041) +YAM, Sault Ste Marie, 6, mid, POINT(-84.5006089999717 46.4854175101926) +YDQ, Dawson Cr., 6, mid, POINT(-120.185595619101 55.7394117074557) +YEG, Edmonton Int'l, 6, major, POINT(-113.584492564406 53.3072001619183) +YHZ, Halifax Int'l, 6, major, POINT(-63.5149652501673 44.886545450101) +YKA, Kamloops, 6, mid, POINT(-120.441734763962 50.7051955184591) +YSB, Sudbury, 6, mid, POINT(-80.7957747817105 46.6227508204893) +YSJ, Saint John, 6, mid, POINT(-65.8905573681168 45.3292305955017) +YXS, Prince George, 6, mid, POINT(-122.674014743986 53.8842485751138) +YYJ, Victoria Int'l, 6, major, POINT(-123.430624539528 48.640529482179) +ZAM, Zamboanga Int'l, 6, mid, POINT(122.062432321637 6.9197577480583) +ZGC, Lanzhou Zhongchuan, 6, mid, POINT(103.615415363043 36.5078842461237) +ALB, Albany Int'l, 6, mid, POINT(-73.8093518843173 42.7456619801729) +MKE, General Mitchell Int'l, 6, major, POINT(-87.9021056250744 42.9479198729586) +ZHHH, Wang-Chia Tun Airbase, 6, [military,mid], 
POINT(114.24694737615 30.6017141196702) +SYX, Sanya Phoenix Int'l, 6, major, POINT(109.40823949108 18.3090959908593) +LXA, Lhasa Gonggar, 6, mid, POINT(90.9005610194027 29.2936936123184) +HTN, Hotan, 6, mid, POINT(79.8723005212191 37.0400363509765) +DRS, Dresden, 6, major, POINT(13.7649671440047 51.1250912428871) +NNA, Kenitra Air Base, 6, [military,major], POINT(-6.597753628116 34.2986673638223) +QNJ, Annemasse, 6, mid, POINT(6.26491085364159 46.1957283286261) +NOG, Nogales Int'l, 6, mid, POINT(-110.972721301675 31.2255371741159) +SXB, Strasbourg, 6, mid, POINT(7.62784196688924 48.5446961721759) +CGN, Cologne/Bonn, 6, major, POINT(7.12235975524539 50.8782596629471) +PUS, Kimhae Int'l, 6, major, POINT(128.948801379039 35.1702840636829) +CJU, Jeju Int'l, 6, major, POINT(126.491629401972 33.5247173150399) +SVG, Stavanger Sola, 6, major, POINT(5.6298103297218 58.8821564842185) +TRD, Trondheim Vaernes, 6, major, POINT(10.9168095241445 63.472029381717) +CMG, Corumbá Int'l, 6, mid, POINT(-57.6636078925543 -19.0141662885534) +FNC, Madeira, 6, mid, POINT(-16.7756374531213 32.6933642847489) +IGU, Foz do Iguaçu Int'l, 6, mid, POINT(-54.4885922735633 -25.5976832162102) +PVH, Gov. 
Jorge Teixeira de Oliveira Int'l, 6, mid, POINT(-63.8984625004213 -8.71442482859288) +BIO, Bilbao, 6, mid, POINT(-2.90609011679805 43.3050829811195) +PMI, Palma de Mallorca, 6, major, POINT(2.72997660200647 39.5657758586254) +TFN, Tenerife N., 6, major, POINT(-16.3463175679264 28.4875770267731) +GOT, Gothenburg, 6, major, POINT(12.2938269092573 57.6857493534879) +LLA, Lulea, 6, major, POINT(22.1230271243945 65.5490362477616) +AUH, Abu Dhabi Int'l, 6, major, POINT(54.6463293225558 24.4272271529764) +CZL, Mohamed Boudiaf Int'l, 6, mid, POINT(6.62194665181219 36.2834409441601) +ASW, Aswan Int'l, 6, mid, POINT(32.8244372462973 23.9682765441778) +RVN, Rovaniemi, 6, mid, POINT(25.8294409760452 66.5595564168509) +GEO, Cheddi Jagan Int'l, 6, mid, POINT(-58.2541191925889 6.49855290813572) +COK, Cochin Int'l, 6, major, POINT(76.3905198502024 10.1551187628118) +EDL, Eldoret Int'l, 6, mid, POINT(35.2236930658301 0.40507147546036) +ICN, Incheon Int'l, 6, major, POINT(126.450875980796 37.4492088624346) +CUL, Federal de Bachigualato Int'l, 6, mid, POINT(-107.469863792896 24.7668040390461) +ISB, Benazir Bhutto Int'l, 6, [major,military], POINT(73.1007936471882 33.6074457507526) +BRU, Brussels, 5, major, POINT(4.48464032408272 50.8972949641511) +ABV, Abuja Int'l, 5, major, POINT(7.27025993974356 9.00437659781094) +ACV, Arcata-Eureka, 5, mid, POINT(-124.107065520139 40.9719245381314) +AUS, Austin-Bergstrom Int'l, 5, major, POINT(-97.6668367646054 30.2021081920749) +AYT, Antalya, 5, major, POINT(30.8025526439415 36.9153233051868) +BFS, Belfast Int'l, 5, major, POINT(-6.21616943734958 54.6615575470103) +BGY, Orio Al Serio, 5, major, POINT(9.6989176939974 45.6654980560695) +BKI, Kota Kinabalu Int'l, 5, mid, POINT(116.051087873369 5.92289445474807) +BLR, Bengaluru Int'l, 5, major, POINT(77.7095579889575 13.2006108069609) +CBR, Canberra Int'l, 5, major, POINT(149.190760539671 -35.3071855902909) +CMH, Port Columbus Int'l, 5, major, POINT(-82.8840306426634 39.9981181922432) +CMN, Mohamed V 
Int'l, 5, major, POINT(-7.5814559902572 33.3747274815396) +DUS, Düsseldorf Int'l, 5, major, POINT(6.76494446612174 51.2781820420774) +ESB, Esenboğa Int'l, 5, major, POINT(32.9930100772014 40.1151278273234) +HLZ, Hamilton Int'l, 5, mid, POINT(175.336221432708 -37.8658411484827) +HYD, Rajiv Gandhi Int'l, 5, major, POINT(78.42953613452 17.2359831507471) +JFK, John F Kennedy Int'l, 5, major, POINT(-73.7863268609295 40.6459595584081) +KBP, Boryspil Int'l, 5, major, POINT(30.8951621615528 50.340902338877) +KRT, Khartoum, 5, major, POINT(32.550153296633 15.5922226530858) +MSN, Dane Cty. Reg. (Truax Field), 5, major, POINT(-89.3457847894487 43.1363082385868) +MSQ, Minsk Int'l, 5, major, POINT(28.0341933346378 53.8893792398005) +PMO, Palermo, 5, major, POINT(13.1055309888638 38.1863351084895) +PVD, T.F. Green, 5, mid, POINT(-71.4357841445789 41.7260019847189) +RSW, Southwest Florida Int'l, 5, major, POINT(-81.7551231409306 26.5279288067651) +SHE, Shenyang Taoxian Int'l, 5, major, POINT(123.487974430338 41.6347891339582) +SHJ, Sharjah Int'l, 5, major, POINT(55.5205071948853 25.3211964019068) +SJC, San Jose Int'l, 5, major, POINT(-121.929428983532 37.3694905908965) +SNA, John Wayne, 5, major, POINT(-117.861489220393 33.6794857329549) +STR, Stuttgart, 5, major, POINT(9.19395108945536 48.6901051358913) +SYQ, Nacional Tobías Bolaños, 5, mid, POINT(-84.1386091971594 9.95827851919623) +SZX, Shenzhen Bao'an Int'l, 5, major, POINT(113.815852751085 22.6465077147868) +SDF, Louisville Int'l, 5, major, POINT(-85.7417027597367 38.1860207152699) +GVA, Geneva, 5, major, POINT(6.10794577423603 46.231009510158) +LYS, Lyon-Saint Exupery, 5, mid, POINT(5.07594431813459 45.7210186834669) +KIX, Kansai Int'l, 5, major, POINT(135.244459772476 34.4347941629269) +LIS, Lisbon Portela, 5, major, POINT(-9.13069440931071 38.7707623427514) +CNF, Tancredo Neves Int'l, 5, major, POINT(-43.9635815209949 -19.6327821218747) +BMA, Bromma, 5, mid, POINT(17.9456175406145 59.3555902065112) +SUB, Juanda Int'l, 5, 
major, POINT(112.777034594933 -7.383578985276) +MDQ, Astor Piazzolla Int'l, 5, mid, POINT(-57.5816150932392 -37.9332161204482) +GCM, Owen Roberts Int'l, 5, major, POINT(-81.3576706162289 19.2959107437122) +CGO, Zhengzhou Xinzheng Int'l, 5, major, POINT(113.841831302845 34.5263027198957) +DLC, Dalian Zhoushuizi Int'l, 5, major, POINT(121.538913780101 38.9615702300222) +HER, Heraklion Int'l, 5, major, POINT(25.1740558243272 35.3369024101045) +TBS, Tbilisi Int'l, 5, major, POINT(44.9646146141664 41.6694420187261) +XXC, Cascais, 5, mid, POINT(-9.35458240263928 38.7235353208323) +KHH, Kaohsiung Int'l, 4, major, POINT(120.345156342151 22.5717061054422) +SKO, Sadiq Abubakar III, 4, mid, POINT(5.20022616032651 12.9174824166181) +UIO, Mariscal Sucre Int'l, 4, mid, POINT(-78.4899925545701 -0.145552408466882) +KHI, Karachi Civil, 4, mid, POINT(67.1521283592947 24.8985243689595) +KIV, Kishinev S.E., 4, mid, POINT(28.9360487562255 46.9341619900391) +LIM, Jorge Chávez, 4, major, POINT(-77.1075656931342 -12.0237161502221) +YQT, Thunder Bay Int'l, 4, mid, POINT(-89.3121421238136 48.3718811492508) +VNO, Vilnius, 4, major, POINT(25.2807164497285 54.6430549307542) +XIY, Hsien Yang, 4, major, POINT(108.755811342151 34.4429391054422) +NTR, Del Norte Int'l, 4, mid, POINT(-100.238394186577 25.859873767729) +TBU, Fua'amotu Int'l, 4, mid, POINT(-175.135635 -21.24861) +IFN, Esfahan Int'l, 4, mid, POINT(51.8763916812681 32.7460805344321) +HRE, Harare Int'l, 4, mid, POINT(31.1014 -17.9228) +KWI, Kuwait Int'l, 4, major, POINT(47.9714825593316 29.2396800581583) +YOW, Macdonald-Cartier Int'l, 4, major, POINT(-75.6648933870205 45.3201348196531) +KBL, Kabul Int'l, 4, mid, POINT(69.2100736270874 34.5633978864149) +ABJ, Abidjan Port Bouet, 4, mid, POINT(-3.93221929167636 5.2543984451492) +ACA, General Juan N Alvarez Int'l, 4, major, POINT(-99.7545085619681 16.76196735278) +ACC, Kotoka Int'l, 4, major, POINT(-0.171402855660817 5.60698152381193) +ADD, Bole Int'l, 4, mid, POINT(38.7931904366343 
8.98173027581099) +ADE, Aden Int'l, 4, mid, POINT(45.030602 12.8278) +ADL, Adelaide Int'l, 4, mid, POINT(138.532101457699 -34.9405860275154) +ALA, Almaty Int'l, 4, major, POINT(77.0120458771175 43.3464943144793) +ALG, Houari Boumediene, 4, major, POINT(3.21207353516506 36.6997206663535) +ALP, Aleppo Int'l, 4, major, POINT(37.2273414057828 36.1846237314314) +AMD, Sardar Vallabhbhai Patel Int'l, 4, mid, POINT(72.6209000884332 23.0707454635881) +ANF, Cerro Moreno Int'l, 4, mid, POINT(-70.4409908509407 -23.4489545248317) +ASB, Ashkhabad Northwest, 4, mid, POINT(58.3639659208246 37.984853438957) +ASM, Yohannes Iv Int'l, 4, mid, POINT(38.9063540136321 15.2936159696499) +ASU, Silvio Pettirossi Int'l, 4, mid, POINT(-57.5139078247136 -25.2416592533816) +BDA, Bermuda Int'l, 4, mid, POINT(-64.7027740686514 32.3591739601581) +BEG, Surcin, 4, major, POINT(20.2912845946621 44.8190766654433) +BEY, Beirut Int'l, 4, major, POINT(35.4930853618161 33.8254400618668) +BHO, Bairagarh, 4, mid, POINT(77.3408714713579 23.2855684869809) +BKO, Bamako Sénou, 4, mid, POINT(-7.94727226970801 12.5393363425867) +BNA, Nashville Int'l, 4, major, POINT(-86.6692867356375 36.1314876361697) +BNE, Brisbane Int'l, 4, major, POINT(153.120256418844 -27.3853965939099) +BOI, Boise Air Terminal, 4, major, POINT(-116.221841070549 43.5689592234704) +BRW, Wiley Post Will Rogers Mem., 4, mid, POINT(-156.771835 71.289299) +BUF, Greater Buffalo Int'l, 4, major, POINT(-78.7319965523308 42.9340337493526) +BUQ, Bulawayo, 4, mid, POINT(28.622552042904 -20.0155684094908) +BWN, Brunei Int'l, 4, major, POINT(114.933119029209 4.94547528227685) +CAN, Guangzhou Baiyun Int'l, 4, major, POINT(113.297516552171 23.3891511573243) +CCP, Carriel Sur Int'l, 4, mid, POINT(-73.0621061746214 -36.7763727437881) +CCU, Netaji Subhash Chandra Bose Int'l, 4, major, POINT(88.4400010130197 22.6453893785064) +CGP, Chittagong, 4, mid, POINT(91.8147107162383 22.2455658585738) +CHC, Christchurch Int'l, 4, major, POINT(172.538675565223 
-43.4885486784104) +CKY, Conakry, 4, mid, POINT(-13.6210656251671 9.57418115850082) +CLE, Hopkins Int'l, 4, major, POINT(-81.8384406064046 41.4111916124966) +CLO, Alfonso Bonilla Aragón Int'l, 4, mid, POINT(-76.3850714728091 3.54328635123219) +COO, Cotonou Cadjehon, 4, mid, POINT(2.3838000724352 6.3582465034691) +COR, Ingeniero Ambrosio L.V. Taravella Int'l, 4, mid, POINT(-64.2123157670801 -31.3156811684889) +CTG, Rafael Nunez, 4, mid, POINT(-75.5123349559682 10.4449381764915) +CUN, Cancún, 4, major, POINT(-86.8744172506694 21.04019667144) +CUU, General R F Villalobos Int'l, 4, mid, POINT(-105.969204692629 28.7039984997679) +DAC, Zia Int'l Dhaka, 4, mid, POINT(90.4049241599237 23.8481243218127) +DRW, Darwin Int'l, 4, [major,military], POINT(130.877501436774 -12.4080559966556) +DUR, Louis Botha, 4, mid, POINT(30.9457081940881 -29.965914250828) +FBM, Lubumbashi Luano Int'l, 4, mid, POINT(27.5292 -11.5908) +FEZ, Saiss, 4, mid, POINT(-4.98214637678303 33.9305251844673) +FIH, Kinshasa N Djili Int'l, 4, mid, POINT(15.4465162074561 -4.38916882197582) +FNA, Freetown Lungi, 4, mid, POINT(-13.2002296786483 8.61542361726369) +FNJ, Sunan, 4, mid, POINT(125.675321571201 39.2001771667656) +FRU, Vasilyevka, 4, major, POINT(74.468800339909 43.0554527233303) +GBE, Sir Seretse Khama Int'l, 4, mid, POINT(25.9243808264147 -24.5580718089441) +GDL, Don Miguel Hidalgo Int'l, 4, major, POINT(-103.300766222752 20.5246863485173) +GLA, Glasgow Int'l, 4, major, POINT(-4.43167796995107 55.8641828570355) +GUA, La Aurora, 4, mid, POINT(-90.530181111378 14.5881608290051) +GYE, Simon Bolivar Int'l, 4, mid, POINT(-79.887009643933 -2.15833790699136) +HAN, Noi Bai, 4, major, POINT(105.803759436806 21.2145596707245) +HAV, José Martí Int'l, 4, major, POINT(-82.4074206289499 22.9973533364428) +HBE, Borg El Arab Int'l, 4, mid, POINT(29.69266601523 30.9183712786239) +JED, King Abdul Aziz Int'l, 4, major, POINT(39.1504996780448 21.6706857878128) +KAN, Kano Mallam Aminu Int'l, 4, mid, POINT(8.52213718395767 
12.0457071601746) +KHG, Kashi, 4, mid, POINT(76.0130148060075 39.5379686306258) +KIN, Norman Manley Int'l, 4, major, POINT(-76.7786897616576 17.9375751552752) +KTM, Tribhuvan Int'l, 4, mid, POINT(85.357139531668 27.7002816751609) +LAD, Luanda 4 de Fevereiro, 4, mid, POINT(13.2347957502699 -8.84831327917379) +LED, Pulkovo 2, 4, major, POINT(30.3070976454648 59.8054061601897) +LHE, Allama Iqbal Int'l, 4, mid, POINT(74.4108810181748 31.5206296518206) +LLW, Kamuzu Int'l, 4, mid, POINT(33.7827885019788 -13.788622823746) +LOS, Lagos Murtala Muhammed, 4, major, POINT(3.32112435281334 6.57825944540467) +LPB, El Alto Int'l, 4, mid, POINT(-68.1780055277945 -16.5098792213977) +LUN, Lusaka Int'l, 4, mid, POINT(28.4455443211019 -15.3268522509447) +LXR, Luxor, 4, mid, POINT(32.7032970848623 25.6730347786023) +MAA, Chennai Int'l, 4, major, POINT(80.1637759731545 12.9825301669154) +MAR, La Chinita Int'l, 4, mid, POINT(-71.7237688094687 10.5557594684972) +MDE, José María Córdova, 4, mid, POINT(-75.4269557399772 6.171001614358) +MEM, Memphis Int'l, 4, major, POINT(-89.9816280353237 35.0444101240089) +MGA, Augusto Cesar Sandino Int'l, 4, mid, POINT(-86.1712846229543 12.144635873435) +MHD, Mashhad, 4, major, POINT(59.6421943574029 36.2275503134984) +MIA, Miami Int'l, 4, major, POINT(-80.2789718277441 25.7949407212406) +MID, Lic M Crecencio Rejon Int'l, 4, mid, POINT(-89.6630235736434 20.9338603864296) +MLA, Luqa, 4, major, POINT(14.4952644555055 35.8489307943501) +MBA, Moi Int'l, 4, major, POINT(39.6026631870383 -4.03265262579657) +MSU, Moshoeshoe I Int'l, 4, mid, POINT(27.5592160333614 -29.4555740046101) +MSY, New Orleans Int'l, 4, major, POINT(-90.2566939480594 29.9851141460622) +MUX, Multan, 4, [major,military], POINT(71.418995432932 30.1950780904965) +MVD, Carrasco Int'l, 4, major, POINT(-56.026636146282 -34.8410485988569) +MZT, General Rafael Buelna Int'l, 4, mid, POINT(-106.270016617885 23.1665960971344) +NAS, Nassau Int'l, 4, major, POINT(-77.4648472290944 25.0486910600866) 
+NDJ, Ndjamena, 4, mid, POINT(15.0330446385559 12.1295400184115) +NIM, Niamey, 4, mid, POINT(2.17730671184125 13.4767572807942) +CEB, Mactan-Cebu Int'l, 4, major, POINT(123.979134508664 10.3158756727292) +NOV, Nova Lisboa, 4, mid, POINT(15.7497618459595 -12.8025414575915) +OMA, Eppley Airfield, 4, mid, POINT(-95.8994157953121 41.2997111453012) +OME, Nome, 4, mid, POINT(-165.441641712281 64.5072207026631) +OUA, Ouagadougou, 4, mid, POINT(-1.51380536165114 12.3535800260473) +PAP, Mais Gate Int'l, 4, mid, POINT(-72.2944780260473 18.5756829054286) +PBC, Puebla, 4, mid, POINT(-98.375759790423 19.163793546584) +PDX, Portland Int'l, 4, major, POINT(-122.592738881254 45.5889569315305) +PER, Perth Int'l, 4, major, POINT(115.974224942233 -31.9411297945783) +PLZ, H F Verwoerd, 4, mid, POINT(25.6117777567602 -33.9840877431374) +PMC, El Tepual Int'l, 4, mid, POINT(-73.0983841336424 -41.4333820702269) +PNH, Pochentong, 4, major, POINT(104.845027612457 11.5526449176513) +PNQ, Pune, 4, [major,military], POINT(73.9089838110016 18.5791766115328) +POM, Port Moresby Int'l, 4, major, POINT(147.211250855977 -9.43865269316142) +PTY, Tocumen Int'l, 4, major, POINT(-79.3871348215438 9.06687242265839) +PUQ, Carlos Ibáñez de Campo Int'l, 4, mid, POINT(-70.8431237851324 -53.0050698255177) +RDU, Durham Int'l, 4, major, POINT(-78.7913814006751 35.8752323452255) +RGN, Mingaladon, 4, major, POINT(96.1341946114947 16.9011542818251) +RIX, Riga, 4, major, POINT(23.9793791116995 56.9220038786097) +SAH, Sanaa Int'l, 4, mid, POINT(44.2246467902561 15.4739027755737) +SDA, Baghdad Int'l, 4, major, POINT(44.2289125352942 33.268162986377) +SDQ, De Las Américas Int'l, 4, major, POINT(-69.6764726754667 18.4302196948173) +SGN, Tan Son Nhat, 4, major, POINT(106.664246141375 10.8163005571879) +SKG, Thessaloniki, 4, major, POINT(22.9764353610613 40.5238736887775) +SOF, Vrazhdebna, 4, major, POINT(23.4024521357708 42.6891841273195) +STV, Surat, 4, major, POINT(72.7424384372589 21.1204503297172) +SUV, Nausori 
Int'l, 4, mid, POINT(178.560048369959 -18.0458996922854) +SYZ, Shiraz Int'l, 4, major, POINT(52.5897712745211 29.5458013842874) +TAM, Gen Francisco J Mina Int'l, 4, mid, POINT(-97.8698137568394 22.2893319525064) +TGU, Toncontin Int'l, 4, mid, POINT(-87.2192116348986 14.0599852192071) +THR, Mehrabad Int'l, 4, major, POINT(51.3208069717572 35.6913743304946) +TIA, Tirane Rinas, 4, major, POINT(19.7150324049722 41.4208514680567) +TIJ, General Abelardo L Rodriguez Int'l, 4, major, POINT(-116.975476095598 32.5460499135013) +TLC, Jose Maria Morelos Y Pavon, 4, mid, POINT(-99.5706494463542 19.3386880423032) +TLL, Ulemiste, 4, major, POINT(24.798964869983 59.4165014697451) +TLV, Ben Gurion, 4, major, POINT(34.8708499180995 32.0007468501844) +TMS, São Tomé Salazar, 4, mid, POINT(6.71282193005667 0.374744213699427) +TNR, Antananarivo Ivato, 4, mid, POINT(47.4753540009579 -18.7993348763082) +TPA, Tampa Int'l, 4, major, POINT(-82.534824252055 27.9800400852184) +VLN, Zim Valencia, 4, mid, POINT(-67.9223617121873 10.1540056883979) +VOG, Gumrak, 4, mid, POINT(44.354767968489 48.7916764657611) +VTE, Vientiane, 4, mid, POINT(102.568238195728 17.9754595948321) +VVI, Viru Viru Int'l, 4, mid, POINT(-63.1403888218213 -17.6479468257839) +WLG, Wellington Int'l, 4, major, POINT(174.811665268238 -41.3289891844659) +YPR, Prince Rupert, 4, mid, POINT(-130.445587 54.292) +YQG, Windsor, 4, mid, POINT(-82.9600877389448 42.2658784727198) +YQR, Regina, 4, mid, POINT(-104.655433975371 50.4332192867183) +YVR, Vancouver Int'l, 4, major, POINT(-123.180867003812 49.1935590395715) +YWG, Winnipeg Int'l, 4, major, POINT(-97.2267694809585 49.9033302471671) +YXE, John G Diefenbaker Int'l, 4, mid, POINT(-106.690181967554 52.1701439447381) +YXY, Whitehorse Int'l, 4, mid, POINT(-135.076210089402 60.7141521481397) +YYC, Calgary Int'l, 4, major, POINT(-114.010560500236 51.1308572567549) +YYG, Charlottetown, 4, mid, POINT(-63.1312341333234 46.2858131367525) +YYQ, Churchill, 4, mid, POINT(-94.0813639506318 
58.7497237849788) +YYT, St John's Int'l, 4, mid, POINT(-52.7433337428638 47.6131179007955) +YZF, Yellowknife, 4, mid, POINT(-114.437846335049 62.4707373610202) +ZAG, Zagreb, 4, major, POINT(16.0615138009014 45.7333266730984) +ZNZ, Zanzibar, 4, mid, POINT(39.2223319841558 -6.21857034620282) +REK, Reykjavik Air Terminal, 4, mid, POINT(-21.9466344031327 64.1318728609901) +ARH, Arkhangelsk-Talagi, 4, mid, POINT(40.7133465694594 64.5967437730455) +KZN, Kazan Int'l, 4, major, POINT(49.2984458036407 55.6080601429764) +ORY, Paris Orly, 4, major, POINT(2.36737912783773 48.7313030458052) +YQB, Québec, 4, major, POINT(-71.3839280711731 46.7915684363308) +YUL, Montréal-Trudeau, 4, major, POINT(-73.7493162650417 45.4583512294531) +NRT, Narita Int'l, 4, major, POINT(140.384401709179 35.7640560727828) +NGO, Chubu Centrair Int'l, 4, major, POINT(136.814771286824 34.8590296958162) +OKD, Okadama, 4, mid, POINT(141.382100450075 43.1106495990978) +BGO, Bergen Flesland, 4, major, POINT(5.22725311562336 60.2890610502966) +TOS, Tromsø Langnes, 4, major, POINT(18.9072624292132 69.6796790473478) +BEL, Val de Caes Int'l, 4, mid, POINT(-48.4795602893793 -1.38974628795546) +CGR, Campo Grande Int'l, 4, mid, POINT(-54.6689498781305 -20.4572717360311) +CWB, Afonso Pena Int'l, 4, mid, POINT(-49.1737093663469 -25.5360001430558) +FOR, Pinto Martins Int'l, 4, mid, POINT(-38.5407472498334 -3.77859496233091) +GRU, São Paulo-Guarulhos Int'l, 4, major, POINT(-46.481753608842 -23.4261155770421) +GYN, Santa Genoveva, 4, mid, POINT(-49.2266464905994 -16.6323665721637) +POA, Salgado Filho Int'l, 4, mid, POINT(-51.1770409488172 -29.9901930170609) +REC, Gilberto Freyre Int'l, 4, mid, POINT(-34.9182667174851 -8.13162553076239) +SSA, Deputado Luis Eduardo Magalhaes Int'l, 4, mid, POINT(-38.3347989911732 -12.9143614970326) +MDZ, El Plumerillo, 4, mid, POINT(-68.7984838394473 -32.8278001692719) +MAO, Eduardo Gomes Int'l, 4, mid, POINT(-60.0460645898854 -3.0321390062591) +NSI, Yaoundé Nsimalen Int'l, 4, mid, 
POINT(11.5479941396807 3.71484520708126) +PVG, Shanghai Pudong Int'l, 4, major, POINT(121.801518760578 31.1523090295533) +ADJ, Marka Int'l, 4, mid, POINT(35.9841052362449 31.9741994015442) +MLE, Male Int'l, 4, major, POINT(73.5273902836844 4.18870090323372) +VER, Gen. Heriberto Jara Int'l, 4, mid, POINT(-96.1835702143695 19.1424237025017) +OXB, Osvaldo Vieira Int'l, 4, mid, POINT(-15.651185561666 11.8889231454855) +DVO, Francisco Bangoy Int'l, 4, major, POINT(125.645066609434 7.13053746163073) +SEZ, Seychelles Int'l, 4, mid, POINT(55.5115519246793 -4.67106914178521) +DKR, Léopold Sedar Senghor Int'l, 4, major, POINT(-17.490407907719 14.7456306146748) +PZU, Port Sudan New Int'l, 4, mid, POINT(37.2387475981025 19.4341052385231) +TAS, Tashkent Int'l, 4, major, POINT(69.2666137241129 41.2622338767383) +CPH, Copenhagen, 3, major, POINT(12.6493508684508 55.6285017221528) +BBU, Aeroportul National Bucuresti-Baneasa, 3, mid, POINT(26.0857251587764 44.497041455972) +BUD, Ferihegy, 3, major, POINT(19.2622301677881 47.433274269248) +CKG, Chongqing Jiangbei Int'l, 3, major, POINT(106.638019704811 29.7240422241688) +CLT, Douglas Int'l, 3, major, POINT(-80.9439277342763 35.2204281685597) +DTW, Detroit Metro, 3, major, POINT(-83.3478935065615 42.2257204508004) +DUB, Dublin, 3, major, POINT(-6.24388491037139 53.42700828497) +FAI, Fairbanks Int'l, 3, major, POINT(-147.865721120795 64.8180981117369) +HAM, Hamburg, 3, major, POINT(10.005647830925 53.6320011640866) +KUL, Kuala Lumpur Int'l, 3, major, POINT(101.713886325743 2.74751295791811) +LAS, Mccarran Int'l, 3, major, POINT(-115.151323951283 36.0849602383367) +MCO, Orlando Int'l, 3, major, POINT(-81.3073713307985 28.4311506791138) +MSP, Minneapolis St. 
Paul Int'l, 3, major, POINT(-93.2081003718301 44.8820263631968) +MUC, Franz-Josef-Strauss, 3, major, POINT(11.7880627192437 48.3538373961608) +PHL, Philadelphia Int'l, 3, major, POINT(-75.2429857676998 39.876087236427) +PHX, Sky Harbor Int'l, 3, major, POINT(-112.01363529773 33.4358607639498) +SLC, Salt Lake City Int'l, 3, major, POINT(-111.981984879993 40.7867290053708) +STL, Lambert St Louis Int'l, 3, major, POINT(-90.3659545350675 38.7427163155204) +WAW, Okecie Int'l, 3, major, POINT(20.9727263383587 52.171026749259) +ZRH, Zurich Int'l, 3, major, POINT(8.56221279534765 47.4523895064915) +CRL, Gosselies, 3, mid, POINT(4.4543736298165 50.4571296549567) +MUCf, Munich Freight Terminal, 3, major, POINT(11.7694828593654 48.3497964078377) +BCN, Barcelona, 3, major, POINT(2.07800334981292 41.3031552797463) +PRG, Ruzyn, 3, major, POINT(14.2674849854076 50.1076511703671) +HKG, Hong Kong Int'l, 2, major, POINT(113.935016387376 22.3153328280868) +TPE, Taoyuan, 2, major, POINT(121.231370453323 25.0767411043346) +AMS, Schiphol, 2, major, POINT(4.76437693232812 52.3089323889822) +SIN, Singapore Changi, 2, major, POINT(103.986413880993 1.35616083528126) +LHR, London Heathrow, 2, major, POINT(-0.453156652063309 51.4709958799938) +AKL, Auckland Int'l, 2, major, POINT(174.791719433715 -37.0063551142815) +ANC, Anchorage Int'l, 2, major, POINT(-149.981725100633 61.1728936745367) +ATL, Hartsfield-Jackson Atlanta Int'l, 2, major, POINT(-84.4253974336047 33.6405290807352) +PEK, Beijing Capital, 2, major, POINT(116.588174004661 40.078766336331) +BOG, Eldorado Int'l, 2, major, POINT(-74.1433718001028 4.69883276192097) +BOM, Chhatrapati Shivaji Int'l, 2, major, POINT(72.8745639500051 19.0951019488402) +BOS, Gen E L Logan Int'l, 2, major, POINT(-71.0164066172958 42.3665658198506) +BWI, Baltimore-Washington Int'l Thurgood Marshall,2, major, POINT(-76.6686428352448 39.1793943583568) +CAI, Cairo Int'l, 2, major, POINT(31.3997430067114 30.1119904385575) +CAS, Casablanca-Anfa, 2, mid, 
POINT(-7.66321880771143 33.5627883851079) +CCS, Simón Bolivar Int'l, 2, mid, POINT(-67.0057488076316 10.5973549146064) +CPT, Cape Town Int'l, 2, major, POINT(18.5976565083138 -33.9704466120395) +CTU, Chengdushuang Liu, 2, major, POINT(103.956136481695 30.5810712647464) +DEL, Indira Gandhi Int'l, 2, major, POINT(77.0878362565332 28.5592039760586) +DEN, Denver Int'l, 2, major, POINT(-104.673797338542 39.8494613881509) +DFW, Dallas-Ft. Worth Int'l, 2, major, POINT(-97.0403710741144 32.9001505594816) +DMK, Don Muang Int'l, 2, major, POINT(100.602578626505 13.9202766010347) +DXB, Dubai Int'l, 2, major, POINT(55.3540769172243 25.2525655938182) +EWR, Newark Int'l, 2, major, POINT(-74.1771472796706 40.6904798278929) +EZE, Ministro Pistarini Int'l, 2, major, POINT(-58.5412456939382 -34.8136469380323) +FLL, Fort Lauderdale Hollywood Int'l, 2, major, POINT(-80.1452588465189 26.0717095746827) +IAH, George Bush Intercontinental, 2, major, POINT(-95.3337047912947 29.9865909034907) +IST, Atatürk Hava Limani, 2, major, POINT(28.8195493087893 40.9778388177797) +JNB, OR Tambo Int'l, 2, major, POINT(28.2319885648741 -26.1320953994887) +JNU, Juneau Int'l, 2, mid, POINT(-134.583573037872 58.3589441045951) +LAX, Los Angeles Int'l, 2, major, POINT(-118.402468548522 33.9441742543586) +LIN, Linate, 2, major, POINT(9.27996629691061 45.4603938456252) +MEL, Melbourne Int'l, 2, major, POINT(144.848998091131 -37.6699411967893) +MEX, Lic Benito Juarez Int'l, 2, major, POINT(-99.0826079514239 19.4354695720494) +MNL, Ninoy Aquino Int'l, 2, major, POINT(121.004122083437 14.5068323762967) +NBO, Jomo Kenyatta Int'l, 2, major, POINT(36.9250887490365 -1.33052964350634) +HNL, Honolulu Int'l, 2, major, POINT(-157.919783173755 21.332022315024) +ORD, Chicago O'Hare Int'l, 2, major, POINT(-87.90513439065 41.9765291023803) +RUH, King Khalid Int'l, 2, major, POINT(46.701829023464 24.9590317436512) +SCL, Arturo Merino Benitez Int'l, 2, major, POINT(-70.7936860162974 -33.3968336342597) +SEA, Tacoma Int'l, 2, 
major, POINT(-122.302289722924 47.4435819127259) +SFO, San Francisco Int'l, 2, major, POINT(-122.383470344449 37.6170250868053) +SHA, Hongqiao, 2, major, POINT(121.341183788567 31.1872574314078) +SVO, Sheremtyevo, 2, major, POINT(37.4159690348414 55.966447172512) +YYZ, Toronto-Pearson Int'l, 2, major, POINT(-79.6114193247449 43.6809595186356) +SYD, Kingsford Smith, 2, major, POINT(151.166067305601 -33.9365832057717) +HEL, Helsinki Vantaa, 2, major, POINT(24.9682078665914 60.3187158912982) +CDG, Charles de Gaulle Int'l, 2, major, POINT(2.54186776739457 49.0144200969386) +TXL, Berlin-Tegel Int'l, 2, major, POINT(13.2903090925074 52.5544287044101) +VIE, Vienna Schwechat Int'l, 2, major, POINT(16.5607679642129 48.1197563052538) +FRA, Frankfurt Int'l, 2, major, POINT(8.57182286907608 50.0506770895207) +FCO, Leonardo da Vinci Int'l, 2, major, POINT(12.2501008973638 41.7950786307394) +ITM, Osaka Int'l, 2, major, POINT(135.442475256249 34.7901980848749) +GMP, Gimpo Int'l, 2, major, POINT(126.802392860276 37.5573005399508) +OSL, Oslo Gardermoen, 2, major, POINT(11.0991032762581 60.1935783171386) +BSB, Juscelino Kubitschek Int'l, 2, major, POINT(-47.9207885133625 -15.8699985002824) +CGH, Congonhas Int'l, 2, major, POINT(-46.6591155302196 -23.62685882701) +GIG, Rio de Janeiro-Antonio Carlos Jobim Int'l, 2, major, POINT(-43.2483813790683 -22.8123437125006) +MAD, Madrid Barajas, 2, major, POINT(-3.56902665458863 40.4681282733923) +SJU, Luis Muñoz Marin, 2, major, POINT(-66.0042299757548 18.4380770734949) +ARN, Arlanda, 2, major, POINT(17.9307299016916 59.6511203397372) +CGK, Soekarno-Hatta Int'l, 2, major, POINT(106.654296151172 -6.1266029559729) +ATH, Eleftherios Venizelos Int'l, 2, major, POINT(23.9471160554073 37.9362331299254) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_web.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_web.csv new file mode 100644 index 0000000000000..4f79c01845028 --- /dev/null +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/airports_web.csv @@ -0,0 +1,850 @@ +abbrev:keyword,name:text,scalerank:integer,type:keyword,location:cartesian_point +LUH,Sahnewal,9,small,POINT(8455502.60394535 3613330.7325705294) +SSE,Solapur,9,mid,POINT(8452829.548091331 1993750.242252398) +IXR,Birsa Munda,9,mid,POINT(9498179.37479558 2670487.5434313933) +AWZ,Ahwaz,9,mid,POINT(5426503.078078238 3677395.4640035294) +HOD,Hodeidah Int'l,9,mid,POINT(4783520.559160681 1661010.0197476079) +IDR,Devi Ahilyabai Holkar Int'l,9,mid,POINT(8439051.727244465 2599127.5424638605) +ISK,Gandhinagar,9,mid,POINT(8216554.78842419 2269006.0308637465) +IXU,Aurangabad,9,mid,POINT(8393026.88321721 2257317.0388104157) +OMS,Omsk Tsentralny,9,mid,POINT(8161539.810548711 7353650.845101996) +OVB,Novosibirsk Tolmachev,9,mid,POINT(9202465.316351846 7363726.532780712) +PKU,Simpang Tiga,9,mid,POINT(11292980.437026992 51719.69939393884) +ROP,Rota Int'l,9,mid,POINT(16168485.927626405 1593930.8182564748) +SGC,Surgut,9,mid,POINT(8171796.44765684 8704352.415875759) +TRZ,Tiruchirappalli,9,mid,POINT(8761841.111486122 1204941.537981898) +TUK,Turbat Int'l,9,mid,POINT(7016237.446487907 2997693.1716453317) +UET,Quetta Int'l,9,mid,POINT(7452698.660661697 3535602.359115375) +ZAH,Zahedan Int'l,9,mid,POINT(6779435.866395892 3436280.545331025) +BAX,Barnaul,9,mid,POINT(9300793.907152018 7050499.2403030945) +VIBY,Bareilly,9,military,POINT(8844556.48169578 3302259.0587350638) +OPQS,Dhamial,9,small,POINT(8129890.599688743 3970061.6672929903) +CJJ,Cheongju Int'l,9,major,POINT(14192780.461221408 4400430.851323913) +TAE,Daegu Int'l,9,mid,POINT(14319865.193657074 4286860.313581915) +USN,Ulsan,9,mid,POINT(14399814.111402465 4244748.079758785) +WIIT,Radin Inten II,9,mid,POINT(11708145.489503577 -584415.9142832769) +IXD,Allahabad,9,military,POINT(9098334.24757184 2930320.416739089) +CEK,Chelyabinsk,9,mid,POINT(6847513.34653189 7419877.287944653) +RTM,Rotterdam The Hague,8,mid,POINT(493573.29525730683 
6790932.76136571) +VOZ,Voronezh-Chertovitskoye,8,mid,POINT(4366557.080596029 6766314.883439611) +LPL,Liverpool John Lennon,8,major,POINT(-318220.1960029257 7045462.213490053) +VTZ,Vishakapatnam,8,mid,POINT(9264400.050848246 2005730.8942003187) +UPG,Sultan Hasanuddin Int'l,8,major,POINT(13307765.486738103 -563891.4411780944) +VAV,Vava'u Int'l,8,mid,POINT(-19366039.63214016 -2106254.9227825566) +NCL,Newcastle Int'l,8,major,POINT(-190394.8217633351 7369066.857457128) +LCE,Goloson Int'l,8,mid,POINT(-9668261.248290801 1775229.5340126597) +MED,Madinah Int'l,8,major,POINT(4419287.5995595 2819867.0163798034) +YMX,Mirabel Int'l,8,mid,POINT(-8240841.451455459 5729730.32650937) +PLQ,Palanga Int'l,8,mid,POINT(2348556.9901333293 7552712.896849933) +JAI,Jaipur Int'l,8,mid,POINT(8438129.881488405 3101148.238256189) +IXW,Sonari,8,mid,POINT(9592675.061834114 2609711.3556761574) +YEI,Yenisehir,8,mid,POINT(3288925.4499280043 4903146.366382311) +ADA,Şakirpaşa,8,major,POINT(3929239.7725849575 4437045.3220224455) +ADQ,Kodiak,8,mid,POINT(-16974623.632803146 7914688.984267773) +AMA,Amarillo Int'l,8,major,POINT(-11321788.081606884 4193601.017647079) +ASP,Alice Springs,8,mid,POINT(14906004.647493467 -2729295.548151684) +BBI,Biju Patnaik,8,mid,POINT(9553092.491190977 2302986.8243190064) +BET,Bethel,8,mid,POINT(-18015833.618824102 8575194.185266923) +BGA,Palonegro,8,mid,POINT(-8146462.836181615 795507.4679731242) +BHM,Birmingham Int'l,8,major,POINT(-9657230.472993236 3970122.1375035276) +BHQ,Broken Hill,8,mid,POINT(15748413.703297768 -3763178.934067017) +BIL,Logan Int'l,8,major,POINT(-12082275.711751565 5748945.312867002) +BIS,Bismarck Muni.,8,mid,POINT(-11216270.39914765 5905442.7024805) +BJX,Del Bajio Int'l,8,mid,POINT(-11296563.152872415 2390195.8574735913) +BNI,Benin,8,mid,POINT(623799.0891538061 704652.9070746127) +BOJ,Bourgas,8,major,POINT(3063112.6791152563 5246307.98528631) +BRE,Bremen,8,major,POINT(978037.6584513546 6992675.481947904) +BRM,Jacinto Lara 
Int'l,8,mid,POINT(-7720685.958046343 1124010.8981878893) +BRO,Brownsville-South Padre Island Int'l,8,mid,POINT(-10846054.504764346 2987477.244173611) +BRS,Bristol Int'l,8,major,POINT(-301772.0770497764 6689911.121977719) +BTR,Baton Rouge Metro,8,major,POINT(-10147523.482711002 3572197.752773574) +BTS,Bratislava-M.R. Štefánik,8,major,POINT(1914693.5720991618 6135156.464057406) +BTV,Burlington Int'l,8,mid,POINT(-8143586.11862753 5538342.439387725) +CAE,Columbia Metro,8,major,POINT(-9029049.897824744 4019970.844460819) +CCJ,Calicut Int'l,8,major,POINT(8454825.87302229 1247936.18972128) +CCK,Cocos (Keeling) Islands,8,mid,POINT(10778926.83405992 -1366787.9252723285) +CFU,Corfu Int'l (Ioannis Kapodistrias),8,mid,POINT(2216900.5154671874 4808960.53091504) +CGQ,Changchun Longjia Int'l,8,major,POINT(13991797.649996879 5464360.757377616) +CJB,Coimbatore,8,mid,POINT(8575930.4260053 1235525.115500211) +CLJ,Someseni,8,mid,POINT(2636822.6931889798 5906670.9548221715) +CMW,Ignacio Agramonte,8,mid,POINT(-8665677.337307706 2442592.6191352885) +CPR,Casper/Natrona County Int'l,8,major,POINT(-11851567.783803489 5296336.193346968) +CRK,Clark Int'l,8,major,POINT(13419650.356063033 1710834.7683201516) +CTA,Catania Fontanarossa,8,major,POINT(1677302.030486277 4504832.994753228) +CTM,Chetumal Int'l,8,mid,POINT(-9832211.652516523 2096911.838660972) +CWL,Cardiff,8,major,POINT(-371759.094310016 6692110.5772358915) +CYB,Gerrard Smith,8,mid,POINT(-8892140.994383324 2236327.1901728613) +CZM,Cozumel Int'l,8,mid,POINT(-9677048.57568442 2333731.052796642) +DAY,James M. 
Cox Dayton Int'l,8,major,POINT(-9375378.65743411 4851281.9169470705) +DBO,Dubbo,8,mid,POINT(16538705.24374076 -3792050.261192505) +DCA,Washington Nat'l,8,major,POINT(-8576425.087554498 4700739.2820124375) +DGO,Durango Int'l,8,mid,POINT(-11636654.510161458 2768793.0561534814) +DNK,Voloskoye,8,mid,POINT(3906635.748371418 6168224.997725351) +DOK,Donetsk,8,major,POINT(4201732.868678245 6118369.504076505) +DZO,Santa Bernardina Int'l,8,mid,POINT(-6289469.256526324 -3943067.186649304) +EDI,Edinburgh Int'l,8,major,POINT(-374510.45803289703 7548181.024106322) +EIS,Terrance B. Lettsome Int'l,8,mid,POINT(-7184242.835168404 2089626.4842600985) +EKO,Elko Reg.,8,mid,POINT(-12889291.908879902 4986946.728825229) +ESE,Ensenada,8,mid,POINT(-12979376.668927394 3736786.63505639) +FAE,Vágar,8,mid,POINT(-809381.7536597336 8873977.812306616) +FAT,Fresno Yosemite Int'l,8,mid,POINT(-13327169.58511052 4407070.049111948) +FLG,Flagstaff Pulliam,8,mid,POINT(-12431565.859544765 4182774.746162051) +FRS,Mundo Maya Int'l,8,mid,POINT(-10005155.429448588 1910929.774595442) +FSD,Sioux Falls Reg.,8,mid,POINT(-10768088.310616335 5400704.342583571) +GEG,Spokane Int'l,8,major,POINT(-13084140.802946268 6044772.376338353) +GGT,Exuma Int'l,8,mid,POINT(-8446100.653811105 2700354.6779349237) +GIB,Gibraltar,8,mid,POINT(-595199.9142010021 4321456.558557268) +GRR,Gerald R. 
Ford Int'l,8,mid,POINT(-9521108.537199756 5294450.235274964) +GSO,Triad Int'l,8,major,POINT(-8898489.001675807 4315130.954430402) +GTF,Great Falls Int'l,8,mid,POINT(-12396169.440330252 6021151.295119785) +GZT,Gaziantep Oğuzeli Int'l,8,major,POINT(4171564.69612111 4431493.875868084) +HBX,Hubli,8,mid,POINT(8358570.414575938 1730629.8095922712) +HDY,Hat Yai Int'l,8,mid,POINT(11175781.270722937 774043.1418880608) +HFE,Hefei-Luogang,8,mid,POINT(13058243.479722492 3734448.089605577) +HRG,Hurghada Int'l,8,major,POINT(3763395.907768361 3146031.7460181173) +HRK,Kharkov Int'l,8,major,POINT(4038916.1487897974 6432698.341696475) +HSV,Huntsville Int'l,8,major,POINT(-9659742.474646779 4116193.303177003) +IBA,Ibadan,8,mid,POINT(442362.8778862212 821612.6447678236) +ICT,Kansas City Int'l,8,major,POINT(-10845717.58832889 4530511.531807453) +ILM,Wilmington Int'l,8,mid,POINT(-8672943.345543798 4064681.1100209253) +ILR,Ilorin Int'l,8,mid,POINT(500363.34321139066 942432.5693217871) +INL,Falls Int'l,8,mid,POINT(-10397018.107826466 6201537.80542095) +INV,Inverness,8,mid,POINT(-452356.9650553498 7871196.74278086) +IPL,Imperial Cty.,8,mid,POINT(-12865415.696434157 3873285.4354750603) +IXJ,Jammu,8,mid,POINT(8331407.590069867 3853043.8659432284) +IXM,Madurai,8,mid,POINT(8693065.883907339 1100490.6215684628) +JLR,Jabalpur,8,mid,POINT(8912098.603220006 2654350.0740737827) +JRO,Kilimanjaro Int'l,8,mid,POINT(4126078.033182307 -381434.62997014524) +KAD,Kaduna,8,mid,POINT(815443.4866656137 1197493.468809646) +KGA,Kananga,8,mid,POINT(2502276.6110788053 -657967.4512964803) +KMS,Kumasi,8,mid,POINT(-177284.66764144658 749183.4082690945) +KNA,Viña del Mar,8,mid,POINT(-7957184.275847894 -3888455.835146245) +KNU,Kanpur,8,mid,POINT(8946472.94752109 3053534.6493105646) +KOA,Kona Int'l at Keahole,8,mid,POINT(-17370392.35884766 2241912.5796443797) +KOI,Kirkwall,8,mid,POINT(-322979.9766892366 8170543.7302916255) +KTU,Kota,8,mid,POINT(8443638.79089507 2895830.318753794) +LEX,Blue 
Grass,8,major,POINT(-9417436.137112718 4584714.387694398) +LIH,Lihue,8,mid,POINT(-17738658.92159534 2508899.0003370745) +LIT,Clinton National,8,major,POINT(-10265948.911502535 4127036.8399761617) +LMM,Los Mochis,8,mid,POINT(-12143030.022268604 2960552.2966441293) +LOV,Venustiano Carranza Int'l,8,mid,POINT(-11295027.684094377 3117899.762984433) +LRD,Laredo Int'l,8,mid,POINT(-11071353.471975287 3191561.350631171) +LSI,Sumburgh,8,mid,POINT(-143386.25994213327 8372335.289070629) +LTK,Bassel Al-Assad Int'l,8,major,POINT(4001294.572750907 4219371.835967912) +LTN,London Luton,8,major,POINT(-41881.42782922769 6778510.173845021) +LYR,Svalbard Longyear,8,mid,POINT(1724921.0020051654 14502137.593437826) +MBJ,Sangster Int'l,8,mid,POINT(-8673835.583232503 2096292.1109924756) +MDL,Mandalay Int'l,8,mid,POINT(10683404.289295465 2476209.3811312905) +MDW,Chicago Midway Int'l,8,major,POINT(-9767408.864089532 5129327.726857204) +MGM,Montgomery Reg.,8,major,POINT(-9616925.035951443 3803359.277461787) +MHT,Manchester-Boston Reg.,8,major,POINT(-7952388.785102858 5301006.050448044) +DNMA,Maiduguri Int'l,8,mid,POINT(1456631.0122478239 1329037.0143177188) +MJM,Mbuji Mayi,8,mid,POINT(2624035.1929455665 -683116.9489604657) +MOT,Minot Int'l,8,mid,POINT(-11275705.459089473 6149484.221118551) +MSO,Missoula Int'l,8,mid,POINT(-12699738.826715998 5928823.781126811) +MXL,Gen R.S. 
Taboada Int'l,8,mid,POINT(-12829334.654012986 3846105.236601159) +MXP,Malpensa,8,major,POINT(969922.2187413104 5720845.988980041) +NLK,Norfolk Island,8,mid,POINT(18695373.11511 -3380121.7818557587) +NUE,Nurnberg,8,major,POINT(1233132.528159873 6359188.953344722) +ODS,Odessa Int'l,8,major,POINT(3414929.187260608 5851243.349732847) +OOL,Gold Coast,8,mid,POINT(17088975.215953026 -3269984.022480106) +ORN,Oran Es Senia,8,mid,POINT(-67548.32909537641 4248496.772400898) +PAT,Lok Nayak Jaiprakash,8,mid,POINT(9472275.896415105 2948937.113945639) +PDU,Paysandu,8,mid,POINT(-6464159.711970307 -3810851.2315571434) +PFO,Paphos Int'l,8,major,POINT(3616016.8685464207 4125001.4095472917) +PLM,Sultan Mahmud Badaruddin II,8,mid,POINT(11655053.65183475 -322963.7194879351) +PTG,Polokwane Int'l,8,mid,POINT(3278730.8515442796 -2736234.3627758687) +PUJ,Punta Cana,8,mid,POINT(-7610160.521143836 2103557.773842323) +QRO,Queretaro Int'l,8,mid,POINT(-11152805.835673615 2346918.2122207084) +RAJ,Rajkot,8,mid,POINT(7879188.530175245 2548698.927041634) +RIC,Richmond Int'l,8,major,POINT(-8608683.499570044 4510194.66673246) +RJH,Shah Makhdum,8,mid,POINT(9864443.602036383 2807704.1980897053) +ROC,Greater Rochester Int'l,8,major,POINT(-8645655.470767464 5331406.755479048) +ROK,Rockhampton,8,mid,POINT(16751234.18917346 -2677868.447049639) +ROV,Rostov-on-Don,8,mid,POINT(4430906.959748802 5983814.704682168) +RTW,Saratov,8,mid,POINT(5124595.3468343755 6721071.101067962) +SAP,Ramón Villeda Morales Int'l,8,mid,POINT(-9788015.195435785 1741793.3596854515) +SBA,Santa Barbara Muni.,8,mid,POINT(-13340149.466371058 4086111.600439102) +SCC,Deadhorse,8,mid,POINT(-16526252.822861642 11133306.995482098) +SFJ,Kangerlussuaq,8,mid,POINT(-5643252.418852878 10161216.48600118) +SGF,Springfield Reg.,8,major,POINT(-10395307.700094266 4472912.56616818) +SHV,Shreveport Reg.,8,major,POINT(-10444943.31574418 3823130.335275332) +SIP,Simferopol Int'l,8,major,POINT(3784423.3005313645 5624704.86344426) +SIT,Sitka Rocky 
Gutierrez,8,mid,POINT(-15068839.904319104 7771059.43215472) +SJD,Los Cabos Int'l,8,major,POINT(-12213736.126559211 2651701.1924684388) +SLE,McNary Field,8,major,POINT(-13693173.616651691 5607444.70675169) +SLW,Plan de Guadalupe,8,mid,POINT(-11235727.848882718 2943205.3642723667) +SNN,Shannon,8,major,POINT(-993240.2368420266 6926495.232889382) +SON,Santo Pekoa Int'l,8,mid,POINT(18614944.872420333 -1747531.4057391365) +SRG,Achmad Yani,8,mid,POINT(12287284.676877439 -778797.2653059739) +TAP,Tapachula Int'l,8,mid,POINT(-10282581.698533153 1665140.101219538) +TGD,Podgorica,8,major,POINT(2142531.380922883 5216254.438035203) +TLH,Tallahassee Reg.,8,major,POINT(-9389241.938720442 3554497.0951100723) +TRN,Turin Int'l,8,major,POINT(850944.2551802637 5651745.077274048) +TYN,Taiyuan Wusu Int'l,8,major,POINT(12537456.896295004 4544804.369730283) +UAK,Narsarsuaq,8,mid,POINT(-5055730.620995213 8663253.79608162) +VFA,Victoria Falls,8,mid,POINT(2877249.021340671 -2049141.4070381462) +VGA,Vijaywada,8,mid,POINT(8994315.184034804 1866024.5606041239) +VNS,Varanasi,8,mid,POINT(9223251.085231334 2931107.6427009604) +VRA,Juan Gualberto Gomez,8,major,POINT(-9065493.13194443 2636801.3028389914) +VSA,Villahermosa,8,mid,POINT(-10332571.339315712 2036736.9495373347) +YBR,Brandon,8,mid,POINT(-11125926.238492947 6429792.717696196) +YFB,Iqaluit,8,mid,POINT(-7629473.800098713 9286851.57832899) +YHM,John C. Munro Hamilton Int'l,8,mid,POINT(-8897368.719974093 5336870.13325121) +YMM,Fort McMurray,8,mid,POINT(-12381381.238061914 7690194.899629589) +YPE,Peace River,8,mid,POINT(-13073768.785220405 7604724.391298121) +YQM,Greater Moncton Int'l,8,mid,POINT(-7201109.768953818 5798990.9030672265) +YQY,Sydney/J.A. 
Douglas McCurdy,8,mid,POINT(-6684394.474102403 5807206.339210798) +YRB,Resolute Bay,8,mid,POINT(-10572101.354980916 12812131.986712972) +YSM,Fort Smith,8,mid,POINT(-12463448.18099093 8404164.160643855) +YTH,Thompson,8,mid,POINT(-10893806.966216503 7518205.231648724) +YTS,Timmins,8,mid,POINT(-9058294.836846324 6201565.547178734) +YUT,Repulse Bay,8,mid,POINT(-9601306.080919845 10024354.442948502) +YVP,Kuujjuaq,8,mid,POINT(-7617964.784721942 7988766.504596894) +YWK,Wabush,8,mid,POINT(-7444269.3096940005 6969334.746868434) +YXD,Edmonton City Centre,8,mid,POINT(-12637319.62435103 7089313.315893888) +YXJ,Fort St. John (N. Peace),8,mid,POINT(-13440318.909673138 7607550.988458597) +YYB,North Bay/Jack Garland,8,mid,POINT(-8841540.537501581 5838020.434522123) +ZAR,Zaria,8,mid,POINT(855742.7196221425 1247453.2980989793) +SKP,Skopje,8,mid,POINT(2407639.8975023176 5154458.785750381) +VE23,Burnpur,8,mid,POINT(9681962.25914386 2708534.3634418976) +VIDX,Hindon Air Force Station,8,mid,POINT(8610650.430347139 3338507.4311547293) +,Sunchon,8,major,POINT(14014102.540944425 4780855.139040323) +EPLL,Łódź Władysław Reymont,8,mid,POINT(2159955.9995710915 6750950.4877955355) +JMU,Jiamusi Dongjiao,8,mid,POINT(14522318.278517649 5916487.666913196) +MDG,Mudanjiang Hailang,8,major,POINT(14424796.485481957 5548501.105573782) +OSB,Mosul Int'l,8,mid,POINT(4802968.708507405 4343167.901295984) +,Rostov S.W.,8,mid,POINT(4430206.4362093005 5961005.548269013) +OUL,Oulu,8,mid,POINT(2824491.347188837 9589641.85948846) +BOD,Bordeaux,8,major,POINT(-78123.28939311122 5595129.388870756) +CEQ,Mandelieu,8,mid,POINT(774150.9293260665 5395466.397670621) +DOL,St Gatien,8,mid,POINT(17661.22997514412 6336451.994711043) +LIL,Lille-Lesquin,8,mid,POINT(345754.4419988828 6545869.162216055) +TLS,Toulouse-Blagnac,8,major,POINT(152898.34313102745 5408432.374516665) +FUK,Fukuoka,8,major,POINT(14520980.756743785 3973188.3320597117) +HIW,Hiroshima-Nishi,8,mid,POINT(14740857.144768618 4078779.1778013846) 
+NKM,Nagoya,8,mid,POINT(15241823.31132671 4198459.780369257) +SDJ,Sendai,8,mid,POINT(15688283.375864074 4598968.345712369) +KKN,Kirkenes Hoybuktmoen,8,mid,POINT(3327489.744242572 10979419.061494261) +CGB,Marechal Rondon Int'l,8,mid,POINT(-6247269.579797729 -1764358.612397846) +FLN,Hercilio Luz Int'l,8,major,POINT(-5403983.775311228 -3206756.4359988477) +JOI,Joinville-Lauro C. de Loyola,8,mid,POINT(-5432574.80745129 -3026887.357786029) +JPA,Presidente Castro Pinto Int'l,8,mid,POINT(-3890492.927031454 -797579.085511895) +NAT,Augusto Severo Int'l,8,major,POINT(-3923883.0330141652 -657850.389136288) +OPO,Francisco Sa Carneiro,8,major,POINT(-965281.6288988747 5047343.097700593) +SLZ,Marechal Cunha Machado Int'l,8,mid,POINT(-4924355.095817733 -287692.4344673997) +THE,Teresina-Senador Petronio Portella,8,mid,POINT(-4766838.65773869 -564397.2561590385) +VCP,Viracopos-Campinas Int'l,8,mid,POINT(-5247720.930998793 -2633182.5280236835) +VIX,Eurico de Aguiar Salles,8,mid,POINT(-4484899.409841375 -2303550.4814070775) +ALC,Alicante,8,major,POINT(-62030.60887578622 4619998.070732385) +LEI,Almeria,8,mid,POINT(-264005.4647312065 4417908.697067482) +VLC,Valencia,8,mid,POINT(-52706.98819688343 4792315.469321795) +KRN,Kiruna_Airport,8,mid,POINT(2263698.7987375925 10395368.076123616) +NRK,Norrköping Airport,8,major,POINT(1807154.0200379654 8090879.147781534) +BDO,Husein Sastranegara Int'l,8,mid,POINT(11975262.33316275 -770015.4151889349) +ROS,Rosario – Islas Malvinas Int'l,8,mid,POINT(-6766007.413670117 -3884189.7670996315) +MCZ,Maceio/Zumbi dos Palmares Int'l,8,mid,POINT(-3984402.331155381 -1064100.720821432) +SSH,Sharm el-Sheikh Int'l,8,mid,POINT(3828290.52724357 3246503.4598812717) +TCP,Taba Int'l,8,mid,POINT(3871228.566901569 3451532.2465900625) +BDQ,Vadodara,8,mid,POINT(8151513.19896513 2551933.881611313) +KSH,Shahid Ashrafi Esfahani,8,mid,POINT(5249446.864614371 4075412.726983416) +BEN,Benina Int'l,8,mid,POINT(2256227.8701199214 3774772.6104427055) +STY,Nueva Hespérides 
Int'l,8,mid,POINT(-6454758.495452069 -3689683.574835729) +BAIK,Baikonur Cosmodrome,8,spaceport,POINT(7047342.45793506 5774513.8388964385) +KSC,Kennedy Space Center,8,spaceport,POINT(-8976466.227024632 3314233.6639724695) +CSG,Centre Spatial Guyanais,8,spaceport,POINT(-5874154.722978567 584063.0295580276) +AUA,Queen Beatrix Int'l,7,mid,POINT(-7793212.928016378 1403060.2071661926) +JIB,Djibouti-Ambouli Int'l,7,mid,POINT(4803404.055213036 1294776.4983449278) +SAW,Sabiha Gökçen Havaalani,7,major,POINT(3262729.6518857 4998236.219261378) +KSA,Kosrae Island,7,mid,POINT(18140294.850353952 596651.320688774) +FUN,Funafuti Int'l,7,mid,POINT(19947945.358478162 -952503.2767485124) +NAG,Dr. Babasaheb Ambedkar Int'l,7,mid,POINT(8800228.498804038 2402605.2259955257) +HKT,Phuket Int'l,7,mid,POINT(10943378.146616975 905570.5769228583) +NAN,Nadi Int'l,7,mid,POINT(19753771.792048123 -2008647.5942184671) +AGU,Lic. Jesús Terán Peredo Int'l,7,mid,POINT(-11389552.816118516 2475704.9642261537) +ALL,Albenga,7,mid,POINT(904264.4044875562 5472544.55743755) +AMM,Queen Alia Int'l,7,major,POINT(4006355.8750943555 3726960.5297070127) +ARI,Chacalluta Int'l,7,mid,POINT(-7829737.663879898 -2078463.2903823575) +ATR,Atar Int'l,7,mid,POINT(-1452849.6467833705 2332152.2831793325) +BAQ,Ernesto Cortissoz Int'l,7,mid,POINT(-8324088.134775176 1219258.1599457236) +BRC,Teniente Luis Candelaria Int'l,7,mid,POINT(-7921654.161403338 -5033900.199593994) +BYK,Bouaké,7,mid,POINT(-564272.0670974004 863807.8801400807) +BZE,Philip S. W. 
Goldson Int'l,7,major,POINT(-9830424.56968349 1983316.913916906) +CRP,Corpus Christi Int'l,7,major,POINT(-10853902.810592396 3220567.504659815) +CUR,Hato Int'l,7,mid,POINT(-7676244.635519452 1366751.0275958779) +CUZ,Velazco Astete Int'l,7,major,POINT(-8008732.0584205985 -1521290.1742299814) +DAR,Julius Nyerere Int'l,7,mid,POINT(4364555.763107769 -766236.25732045) +DET,Detroit City,7,mid,POINT(-9239959.467363305 5222458.073872979) +DIL,Presidente Nicolau Lobato Int'l,7,mid,POINT(13973362.852466041 -955256.3689919653) +DME,Moscow Domodedovo Int'l,7,major,POINT(4219036.879250353 7442663.196985433) +DUD,Dunedin Int'l,7,mid,POINT(18946580.338641413 -5768087.463027902) +DZA,Dzaoudzi Pamanzi Int'l,7,mid,POINT(5040745.406460782 -1437456.6844804727) +ELP,El Paso Int'l,7,mid,POINT(-11843916.780693443 3736966.344092591) +EVN,Zvartnots Int'l,7,major,POINT(4942592.410326432 4888108.74092733) +FTW,Fort Worth Meacham Field,7,major,POINT(-10837524.038298065 3871536.006539092) +GDT,JAGS McCartney Int'l,7,mid,POINT(-7919951.380391025 2444675.8786769514) +GLS,Scholes Int'l,7,mid,POINT(-10559254.981462145 3409689.0555304266) +GOM,Goma Int'l,7,mid,POINT(3254987.8658562815 -184628.88894769456) +GOU,Garoua Int'l,7,mid,POINT(1488612.2026627557 1043309.2781158191) +GUM,Antonio B. Won Pat Int'l,7,major,POINT(16119713.525638564 1516072.5798671548) +GYY,Gary/Chicago Int'l,7,mid,POINT(-9730254.084501456 5103897.5235705795) +HAH,Prince Said Ibrahim Int'l,7,mid,POINT(4817302.11908583 -1293019.6871203585) +HBA,Hobart Int'l,7,mid,POINT(16420292.38487077 -5287286.758863006) +HIR,Honiara Int'l,7,mid,POINT(17816223.096637283 -1054240.8270733042) +IEV,Kiev Zhuliany Int'l,7,mid,POINT(3389136.426421935 6518076.263289912) +IND,Indianapolis Int'l,7,major,POINT(-9603910.997645874 4826813.4210143285) +INU,Nauru Int'l,7,mid,POINT(18581019.671998776 -60674.18164536664) +IPC,Mataveri Int'l,7,mid,POINT(-12181699.047597444 -3143322.4850485763) +JUJ,Gob. 
Horacio Guzman Int'l,7,mid,POINT(-7246204.945703721 -2800527.284017333) +KHN,Nanchang Changbei Int'l,7,mid,POINT(12903262.581378927 3358155.5960129797) +KMG,Kunming Wujiaba Int'l,7,major,POINT(11437200.211897217 2875744.1465633484) +LBA,Leeds Bradford,7,major,POINT(-184771.54921997504 7145400.867562014) +LBV,Libreville Leon M'ba Int'l,7,mid,POINT(1047541.2747129751 50889.04616708905) +LFW,Lomé Tokoin,7,mid,POINT(139253.11953544617 687822.5535436489) +MAJ,Marshall Islands Int'l,7,mid,POINT(19067016.046435077 788822.6351632545) +MFM,Macau Int'l,7,major,POINT(12643056.948534435 2530464.4237606986) +MGQ,Aden Adde Int'l,7,mid,POINT(5043177.848523895 224505.74735198304) +MPM,Maputo Int'l,7,mid,POINT(3626142.4129508836 -2989705.3087203507) +MRU,Sir Seewoosagur Ramgoolam Int'l,7,mid,POINT(6420572.7128638765 -2324249.2006122437) +NAP,Naples Int'l,7,major,POINT(1589958.9694746814 4994374.042314908) +NDB,Nouadhibou Int'l,7,mid,POINT(-1896153.8526920453 2383420.841325014) +NGB,Ningbo Lishe Int'l,7,major,POINT(13521067.885150602 3480539.064616827) +NKC,Nouakchott Int'l,7,mid,POINT(-1775760.271167521 2049013.4688025746) +NOU,La Tontouta Int'l,7,mid,POINT(18503217.64052093 -2513162.7933494844) +OAK,Oakland Int'l,7,major,POINT(-13604718.011410616 4538863.31561408) +ONT,Ontario Int'l,7,major,POINT(-13090318.035377573 4036887.0868777614) +ORK,Cork,7,major,POINT(-945118.2841845707 6772785.8355821455) +PDG,Minangkabau Int'l,7,mid,POINT(11163725.879392691 -87504.95360158873) +PDL,João Paulo II,7,mid,POINT(-2860575.6435568 4543230.3658530535) +PEW,Bacha Khan Int'l,7,mid,POINT(7961438.066966148 4027647.689150116) +PIK,Glasgow Prestwick,7,mid,POINT(-513291.01491689496 7461264.703902999) +PMG,Ponta Porã Int'l,7,mid,POINT(-6201172.390098794 -2577904.048923968) +PMR,Palmerston N. 
Int'l,7,mid,POINT(19550071.827409033 -4913037.790681443) +PNI,Pohnpei Int'l,7,mid,POINT(17611111.29774732 779085.7073988881) +PPT,Tahiti Faa'a Int'l,7,mid,POINT(-16654482.070742918 -1986047.649798476) +RAI,Praia Int'l,7,mid,POINT(-2614472.04601206 1682861.125883868) +RAK,Marrakech-Menara,7,mid,POINT(-893294.9823897476 3711218.094472542) +RAR,Rarotonga Int'l,7,mid,POINT(-17788649.38995358 -2415859.7629382997) +REP,Siem Reap Int'l,7,major,POINT(11556719.824695675 1506475.2703427267) +RGA,Hermes Quijada Int'l,7,mid,POINT(-7542232.448229492 -7128878.350037609) +RGL,Piloto Civil Norberto Fernandez Int'l,7,mid,POINT(-7715161.0801772615 -6730217.719884303) +RNO,Reno-Tahoe Int'l,7,major,POINT(-13333323.547478328 4794391.43877309) +ROR,Roman Tmetuchl Int'l,7,mid,POINT(14976139.87476799 822078.6365996924) +SID,Amilcar Cabral Int'l,7,mid,POINT(-2554120.7873862777 1889975.13242098) +SJJ,Sarajevo,7,major,POINT(2041223.0393792342 5438537.24696356) +SKB,Robert L. Bradshaw Int'l,7,mid,POINT(-6981314.201527872 1957072.1904655132) +SLA,Martín Miguel de Güemes Int,7,mid,POINT(-7289030.611113107 -2856641.581825258) +SPN,Saipan Int'l,7,mid,POINT(16221887.485914063 1703208.4974420785) +SRE,Juana Azurduy de Padilla Int'l,7,mid,POINT(-7268368.277044414 -2156574.3426799397) +SXM,Princess Juliana Int'l,7,major,POINT(-7025626.436682454 2042493.7259003702) +TAI,Ta'izz Int'l,7,mid,POINT(4913061.539893586 1538158.9292770089) +TAO,Qingdao Liuting Int'l,7,mid,POINT(13400716.661194453 4337527.2570991535) +TKK,Chuuk Int'l,7,mid,POINT(16902979.245892506 832532.2878718402) +TNG,Tangier Ibn Battouta,7,mid,POINT(-658218.8882998485 4262952.461339153) +TRW,Bonriki Int'l,7,mid,POINT(19274523.52822974 153710.21304974618) +TSE,Astana Int'l,7,major,POINT(7954995.913270655 6626059.647523059) +TSN,Tianjin Binhai Int'l,7,major,POINT(13063645.385366186 4740232.750603333) +TUC,Teniente Gen. 
Benjamin Matienzo Int'l,7,mid,POINT(-7247803.279606909 -3102963.45676297) +TUN,Aeroport Tunis,7,major,POINT(1137429.0770000557 4417864.313958366) +TUS,Tucson Int'l,7,major,POINT(-12349529.746771142 3779119.131277403) +ULN,Chinggis Khaan Int'l,7,mid,POINT(11884788.768741926 6082355.405563132) +URC,Ürümqi Diwopu Int'l,7,major,POINT(9736796.355915418 5449723.243382809) +VLI,Bauerfield Int'l,7,mid,POINT(18737254.693895087 -2002662.2875210827) +WWK,Wewak Int'l,7,mid,POINT(15993171.310729112 -398808.6502783647) +XMN,Xiamen Gaoqi Int'l,7,major,POINT(13149834.020969763 2819005.3334833384) +YAP,Yap Int'l,7,mid,POINT(15371711.104633339 1062179.246843793) +ZLO,Playa de Oro Int'l,7,mid,POINT(-11639576.554971006 2172378.4273351827) +CAY,Cayenne – Rochambeau,7,mid,POINT(-5829112.315344805 537335.497998322) +UIII,Irkutsk N.W.,7,mid,POINT(11599196.977553647 6865782.00373984) +SJW,Shijiazhuang Zhengding Int'l,7,major,POINT(12767484.71571615 4618792.651434057) +GYD,Heydar Aliyev Int'l,7,major,POINT(5571522.645949029 4933417.026045506) +CFB,Cabo Frio Int'l,7,mid,POINT(-4684240.877996653 -2623027.5249208915) +HEM,Helsinki-Malmi,7,mid,POINT(2788056.2440145654 8455469.633769127) +LUX,Luxembourg-Findel,7,major,POINT(692008.8444649252 6383182.97865069) +VCE,Venice Marco Polo,7,major,POINT(1373801.3277301549 5701352.694091503) +YNY,Yangyang Int'l,7,mid,POINT(14322698.382555997 4587733.128111555) +TBT,Tabatinga Int'l,7,mid,POINT(-7785626.624672179 -473578.5300370567) +BVB,Boa Vista Int'l,7,mid,POINT(-6756227.096075219 316410.1201485046) +LPA,Gran Canaria,7,major,POINT(-1713198.560450905 3241019.3864272083) +ING,Com. 
Armando Tola Int'l,7,mid,POINT(-8020998.660927022 -6495588.415151117) +NYO,Stockholm-Skavsta,7,mid,POINT(1883704.5141685435 8134083.898609498) +MES,Polonia Int'l,7,mid,POINT(10984583.510474388 397287.84167945426) +BGF,Bangui M'Poko Int'l,7,mid,POINT(2062096.0099064007 490159.67590257555) +HGH,Hangzhou Xiaoshan Int'l,7,major,POINT(13406439.755097099 3533816.8107512025) +CXI,Cassidy Int'l,7,mid,POINT(-17516097.151531324 221142.74757068782) +SQQ,Šiauliai Int'l,7,mid,POINT(2603004.645166201 7539282.67806727) +IUE,Niue Int'l,7,mid,POINT(-18916090.23893756 -2163969.7053835443) +AGT,Guaraní Int'l,7,mid,POINT(-6104694.031044648 -2931964.4005073807) +AQP,Rodríguez Ballón Int'l,7,mid,POINT(-7966905.918635838 -1844658.508676942) +PRN,Pristina,7,major,POINT(2341078.8377142325 5249021.444376864) +ANR,Deurne,6,mid,POINT(495474.4567617634 6654816.798110057) +LAP,Gen. Márquez de León Int'l,6,mid,POINT(-12286020.26603441 2762682.794954582) +HRB,Harbin Taiping,6,major,POINT(14052636.670274785 5719757.353706103) +TRV,Trivandrum Int'l,6,mid,POINT(8562573.065500375 947061.8445487191) +ADB,Adnan Menderes,6,major,POINT(3022245.983700102 4620649.60509541) +NKG,Nanjing Lukou Int'l,6,major,POINT(13232113.963574817 3728617.8352103615) +FPO,Freeport Int'l,6,mid,POINT(-8761281.890977832 3067143.6374498503) +TIP,Tripoli Int'l,6,major,POINT(1463212.21662777 3851473.635896816) +YQX,Gander Int'l,6,mid,POINT(-6075324.874701395 6265805.045828414) +ABQ,Albuquerque Int'l,6,major,POINT(-11868515.102256078 4170563.5012235222) +ANU,V.C. 
Bird Int'l,6,mid,POINT(-6878694.903916862 1937169.899469179) +APW,Faleolo,6,mid,POINT(-19146654.327152207 -1555006.8367732386) +ATZ,Asyut,6,mid,POINT(3452713.0498732966 3129821.9526517875) +BAH,Bahrain Int'l,6,major,POINT(5635660.861027672 3032522.616286349) +BDL,Bradley Int'l,6,major,POINT(-8091301.130936581 5150546.8283608835) +BGI,Grantley Adams Int'l,6,mid,POINT(-6622109.180032715 1468834.8298397462) +BJL,Yundum Int'l,6,mid,POINT(-1853727.0337250433 1499045.0010975497) +BJM,Bujumbura Int'l,6,mid,POINT(3263997.0103294738 -370015.6602307333) +BLZ,Chileka Int'l,6,mid,POINT(3893059.018590655 -1767854.5001858468) +BME,Broome Int'l,6,mid,POINT(13607009.997033369 -2031998.4098272626) +BND,Bandar Abbass Int'l,6,mid,POINT(6274955.736911527 3149773.84868117) +BSR,Basrah Int'l,6,major,POINT(5306419.417106288 3574806.7582105543) +CJS,Ciudad Juarez Int'l,6,mid,POINT(-11848384.249120004 3715592.418746559) +CMB,Katunayake Int'l,6,major,POINT(8892786.169217823 801158.3816002631) +CNS,Cairns Int'l,6,mid,POINT(16225214.846172804 -1906481.8084950116) +CNX,Chiang Mai Int'l,6,major,POINT(11017080.514349962 2127740.252420455) +COS,City of Colorado Springs,6,major,POINT(-11655248.677721538 4692695.8242829) +CPE,Ign. Alberto Ongay Int'l,6,mid,POINT(-10074817.825470977 2251036.241428327) +CSX,Changsha Huanghua Int'l,6,major,POINT(12602930.864548087 3272939.761293922) +CVG,Greater Cincinnati Int'l,6,major,POINT(-9423881.727484405 4729612.966899267) +DAD,Da Nang,6,major,POINT(12045070.163087592 1810877.975522384) +DAL,Dallas Love Field,6,major,POINT(-10781277.518383406 3874672.1933670016) +DAM,Damascus Int'l,6,major,POINT(4064596.931310203 3950043.5050990772) +DAV,Enrique Malek Int'l,6,mid,POINT(-9176261.3632622 937468.2344106183) +DIR,Aba Tenna D. 
Yilma Int'l,6,mid,POINT(4659584.164069928 1075134.0681996795) +DPS,Bali Int'l,6,major,POINT(12819811.150600947 -977263.4484991981) +DSM,Des Moines Int'l,6,major,POINT(-10424899.020633116 5091248.620985529) +EBB,Entebbe Int'l,6,mid,POINT(3611511.224070954 5002.804114694496) +FKI,Kisangani Bangoka Int'l,6,mid,POINT(2819752.923880007 54794.9256193094) +FOC,Fuzhou Changle Int'l,6,mid,POINT(13321385.702380756 2990639.416433904) +GAU,Lokpriya G. Bordoloi Int'l,6,mid,POINT(10195555.021415079 3012122.158586034) +GDN,Gdansk Lech Walesa,6,major,POINT(2055897.583295918 7242589.051225524) +GND,Point Salines Int'l,6,mid,POINT(-6877969.693178219 1346535.5994887853) +GOJ,Nizhny Novgorod Int'l,6,mid,POINT(4874639.726208561 7602046.889043493) +GYM,Gen. José M. Yáñez Int'l,6,mid,POINT(-12347741.737370215 3245123.359448489) +HET,Hohhot Baita Int'l,6,mid,POINT(12447153.443595309 4990839.333622971) +HLN,Helena Reg.,6,mid,POINT(-12466658.296454819 5878679.980952085) +HMO,Gen. Ignacio P. Garcia Int'l,6,mid,POINT(-12362241.150184356 3387115.853910557) +IAD,Dulles Int'l,6,major,POINT(-8621448.832725875 4714909.1145619275) +ITO,Hilo Int'l,6,mid,POINT(-17258932.634703662 2239275.227556005) +JAN,Jackson Int'l,6,major,POINT(-10027114.112388613 3804079.9932397893) +JAX,Jacksonville Int'l,6,major,POINT(-9092974.16751545 3566866.3085019784) +KCH,Kuching Int'l,6,mid,POINT(12283197.11408071 165573.82391521157) +KGL,Kigali Int'l,6,mid,POINT(3354599.1425852366 -218635.81727042914) +KRK,Kraków-Balice,6,major,POINT(2204234.7085141954 6458799.948884591) +KUF,Kurumoch,6,major,POINT(5582368.062474931 7077594.627804628) +KWL,Guilin Liangjiang Int'l,6,major,POINT(12250364.147553366 2902496.320094184) +LAO,Laoag Int'l,6,mid,POINT(13417769.721492335 2058911.3483071644) +LGA,LaGuardia,6,major,POINT(-8223391.845423915 4979145.077158573) +LGW,London Gatwick,6,major,POINT(-18140.806687841443 6648891.672311942) +LJU,Ljubljana,6,major,POINT(1609102.3812974975 5817371.431276469) +LKO,Amausi 
Int'l,6,mid,POINT(9003984.837297449 3094009.1124208826) +LPG,La Plata,6,mid,POINT(-6444884.450607653 -4159199.717766613) +MAM,Gen. Sevando Canales,6,mid,POINT(-10857081.409642603 2970726.2244444657) +MAN,Manchester Int'l,6,major,POINT(-253070.56785998048 7050332.194180284) +MCI,Kansas City Int'l,6,major,POINT(-10543727.412002195 4764432.868334554) +MCT,Seeb Int'l,6,major,POINT(6488866.604611139 2703353.1666357466) +MIR,Habib Bourguiba Int'l,6,mid,POINT(1197059.4706727997 4267745.255641953) +MRS,Marseille Provence Airport,6,major,POINT(581241.2712448809 5379348.463539454) +NLD,Quetzalcoatl Int'l,6,mid,POINT(-11083859.971345954 3179767.747258902) +NNG,Nanning Wuwu Int'l,6,major,POINT(12041208.046387773 2585168.021510625) +OAX,Xoxocotlán Int'l,6,mid,POINT(-10767021.072484456 1920890.1412714333) +OGG,Kahului,6,mid,POINT(-17414535.00200481 2379158.9225228424) +OKC,Will Rogers,6,major,POINT(-10864350.131798934 4217728.367585358) +ORF,Norfolk Int'l,6,major,POINT(-8483037.583616761 4424932.157094424) +PBI,Palm Beach Int'l,6,major,POINT(-8915599.094680732 3084600.285946331) +PBM,Pengel Int'l,6,mid,POINT(-6144826.027689607 608279.097201853) +PEE,Bolshesavino,6,mid,POINT(6236068.92505864 7950482.8029074315) +PEN,Penang Int'l,6,mid,POINT(11161536.283915056 590015.50060982) +PHC,Port Harcourt Int'l,6,mid,POINT(773659.0426649383 558087.8629605463) +PHE,Port Hedland Int'l,6,mid,POINT(13206031.324724847 -2317879.5075577046) +PIR,Pierre Regional,6,mid,POINT(-11164525.835715491 5524461.520754504) +PIT,Greater Pittsburgh Int'l,6,major,POINT(-8934071.419685839 4938291.210183067) +PPG,Pago Pago Int'l,6,mid,POINT(-19003718.41285743 -1611996.4852122897) +BHX,Birmingham Int'l,6,major,POINT(-192998.1304997863 6882435.083284423) +ROB,Roberts Int'l,6,mid,POINT(-1152500.1711332249 696216.3320630078) +RPR,Raipur,6,mid,POINT(9099297.210737927 2414069.4006553018) +SAL,El Salvador Int'l,6,mid,POINT(-9913802.55280452 1510589.7948265967) +SAN,San Diego Int'l,6,major,POINT(-13046367.24962344 
3859820.2444341164) +SAT,San Antonio Int'l,6,major,POINT(-10961849.557716547 3442845.280307546) +SAV,Savannah Int'l,6,major,POINT(-9040251.926102847 3781128.8879454345) +SCU,Antonio Maceo,6,mid,POINT(-8442457.687705172 2269765.034226519) +SLP,Ponciano Arriaga Int'l,6,mid,POINT(-11236197.312973397 2542254.4548414224) +SMF,Sacramento Int'l,6,major,POINT(-13535102.5444142 4677751.995727176) +STI,Cibao Int'l,6,mid,POINT(-7869639.932903658 2209868.2184448238) +SVX,Koltsovo,6,major,POINT(6768871.065442893 7705587.7723963475) +SYR,Syracuse Hancock Int'l,6,major,POINT(-8472869.196888562 5332052.351045366) +TBZ,Tabriz,6,mid,POINT(5147937.944627146 4597963.95987773) +TRC,Torreon Int'l,6,mid,POINT(-11510300.409719147 2945083.230322099) +TUL,Tulsa Int'l,6,major,POINT(-10674412.866695007 4326814.267148107) +TYS,Mcghee Tyson,6,major,POINT(-9349717.11130138 4273924.972021594) +UFA,Ufa Int'l,6,major,POINT(6220987.0330724865 7277920.512101227) +UVF,Hewanorra Int'l,6,mid,POINT(-6784920.044200947 1544005.8226310255) +WDH,Windhoek Hosea Kutako Int'l,6,mid,POINT(1943997.4151090814 -2570091.085152095) +YAM,Sault Ste Marie,6,mid,POINT(-9406564.76559836 5858481.891514792) +YDQ,Dawson Cr.,6,mid,POINT(-13378999.305004613 7506713.833720897) +YEG,Edmonton Int'l,6,major,POINT(-12644167.874282043 7040025.029325383) +YHZ,Halifax Int'l,6,major,POINT(-7070453.58940109 5603678.038233298) +YKA,Kamloops,6,mid,POINT(-13407512.584182765 6569310.967237297) +YSB,Sudbury,6,mid,POINT(-8994144.506948028 5880713.333772309) +YSJ,Saint John,6,mid,POINT(-7334903.294303742 5673501.878975562) +YXS,Prince George,6,mid,POINT(-13656008.854867056 7148264.7668254785) +YYJ,Victoria Int'l,6,major,POINT(-13740234.272035994 6214085.427919952) +ZAM,Zamboanga Int'l,6,mid,POINT(13587927.81103305 772183.3765967496) +ZGC,Lanzhou Zhongchuan,6,mid,POINT(11534415.276547482 4370732.390306705) +ALB,Albany Int'l,6,mid,POINT(-8216419.467543748 5273338.785275772) +MKE,General Mitchell Int'l,6,major,POINT(-9785217.637839831 
5304048.077202676) +SYX,Sanya Phoenix Int'l,6,major,POINT(12179269.508735549 2073759.6077127168) +LXA,Lhasa Gonggar,6,mid,POINT(10119004.165502802 3413080.0215534503) +HTN,Hotan,6,mid,POINT(8891343.82250943 4444688.808475181) +DRS,Dresden,6,major,POINT(1532309.1332567444 6643450.815136505) +QNJ,Annemasse,6,mid,POINT(697406.6860926346 5811770.492153249) +NOG,Nogales Int'l,6,mid,POINT(-12353426.827246323 3662074.208101043) +SXB,Strasbourg,6,mid,POINT(849127.4836056725 6197956.03352781) +CGN,Cologne/Bonn,6,major,POINT(792857.4612004214 6599787.453002975) +PUS,Kimhae Int'l,6,major,POINT(14354514.907917595 4187046.214216792) +CJU,Jeju Int'l,6,major,POINT(14080983.774638996 3965160.3238162454) +SVG,Stavanger Sola,6,major,POINT(626707.6191673423 8154959.856244158) +TRD,Trondheim Vaernes,6,major,POINT(1215253.6773149248 9216940.83464713) +CMG,Corumbá Int'l,6,mid,POINT(-6419083.467902136 -2156603.837024819) +FNC,Madeira,6,mid,POINT(-1867455.4190140315 3854673.5716768354) +IGU,Foz do Iguaçu Int'l,6,mid,POINT(-6065642.345935367 -2949337.010288327) +PVH,Gov. 
Jorge Teixeira de Oliveira Int'l,6,mid,POINT(-7113144.308019985 -973847.2772337806) +BIO,Bilbao,6,mid,POINT(-323504.4720013238 5358524.47907067) +PMI,Palma de Mallorca,6,major,POINT(303899.60521291156 4803041.199542769) +TFN,Tenerife N.,6,major,POINT(-1819663.7480067087 3310586.338264019) +GOT,Gothenburg,6,major,POINT(1368542.5514391668 7901590.990691348) +LLA,Lulea,6,major,POINT(2462724.114293375 9754498.931349153) +AUH,Abu Dhabi Int'l,6,major,POINT(6083201.553908446 2805554.6889186236) +CZL,Mohamed Boudiaf Int'l,6,mid,POINT(737151.7293399558 4339692.82560165) +ASW,Aswan Int'l,6,mid,POINT(3653999.6398335784 2749542.9443677575) +RVN,Rovaniemi,6,mid,POINT(2875320.2169282665 10031697.623810166) +GEO,Cheddi Jagan Int'l,6,mid,POINT(-6484818.885129661 724971.6536276314) +COK,Cochin Int'l,6,major,POINT(8503753.771157987 1136428.3072449402) +EDL,Eldoret Int'l,6,mid,POINT(3921083.575946768 45092.72602596125) +ICN,Incheon Int'l,6,major,POINT(14076447.124545597 4501907.048961273) +CUL,Federal de Bachigualato Int'l,6,mid,POINT(-11963490.513047652 2847128.8221669463) +BRU,Brussels,5,major,POINT(499227.8772678698 6603146.459346397) +ABV,Abuja Int'l,5,major,POINT(809321.6344269889 1006514.3393965173) +ACV,Arcata-Eureka,5,mid,POINT(-13815535.337549312 5008201.421188738) +AUS,Austin-Bergstrom Int'l,5,major,POINT(-10872222.536025645 3529555.5113469767) +AYT,Antalya,5,major,POINT(3428924.4754565703 4427310.511050465) +BFS,Belfast Int'l,5,major,POINT(-691980.8164504651 7296456.465229552) +BGY,Orio Al Serio,5,major,POINT(1079678.5789416616 5726906.131400019) +BKI,Kota Kinabalu Int'l,5,mid,POINT(12918748.008068882 660511.0352292418) +BLR,Bengaluru Int'l,5,major,POINT(8650588.425101113 1482660.900483537) +CBR,Canberra Int'l,5,major,POINT(16607839.494337387 -4205705.194211562) +CMH,Port Columbus Int'l,5,major,POINT(-9226608.086035373 4865668.824096434) +CMN,Mohamed V Int'l,5,major,POINT(-843963.8203070451 3945149.052797037) +DUS,Düsseldorf Int'l,5,major,POINT(753070.173213446 
6670649.207078674) +ESB,Esenboğa Int'l,5,major,POINT(3672765.081531403 4882686.472655303) +HLZ,Hamilton Int'l,5,mid,POINT(19518338.88750571 -4560490.963686959) +HYD,Rajiv Gandhi Int'l,5,major,POINT(8730736.025647417 1948312.2526838314) +JFK,John F Kennedy Int'l,5,major,POINT(-8213856.333664716 4960260.200711149) +KBP,Boryspil Int'l,5,major,POINT(3439233.719799671 6505524.780690998) +KRT,Khartoum,5,major,POINT(3623466.4902241807 1757547.5872633066) +MSN,Dane Cty. Reg. (Truax Field),5,major,POINT(-9945927.267286837 5332742.418426139) +MSQ,Minsk Int'l,5,major,POINT(3120752.126812064 7149233.820337807) +PMO,Palermo,5,major,POINT(1458901.0362557853 4605782.259097994) +PVD,T.F. Green,5,mid,POINT(-7952195.115392729 5120023.826151223) +RSW,Southwest Florida Int'l,5,major,POINT(-9100938.677789772 3064615.4819997395) +SHE,Shenyang Taoxian Int'l,5,major,POINT(13746618.432678014 5106428.650843387) +SHJ,Sharjah Int'l,5,major,POINT(6180514.589518913 2915248.2277230374) +SJC,San Jose Int'l,5,major,POINT(-13573121.947161395 4490734.915231283) +SNA,John Wayne,5,major,POINT(-13120280.96415105 3985845.5583448987) +STR,Stuttgart,5,major,POINT(1023465.9536564335 6222441.373772455) +SYQ,Nacional Tobías Bolaños,5,mid,POINT(-9366267.131882029 1114174.215609692) +SZX,Shenzhen Bao'an Int'l,5,major,POINT(12669922.772452988 2589325.3338162964) +SDF,Louisville Int'l,5,major,POINT(-9544722.690962108 4605737.732605023) +GVA,Geneva,5,major,POINT(679933.413380882 5817446.253277886) +LYS,Lyon-Saint Exupery,5,mid,POINT(565051.5367897527 5735754.425198433) +KIX,Kansai Int'l,5,major,POINT(15055344.3944834 4087334.751755599) +LIS,Lisbon Portela,5,major,POINT(-1016424.252233458 4688888.236573916) +CNF,Tancredo Neves Int'l,5,major,POINT(-4894003.508365724 -2229579.2920284085) +BMA,Bromma,5,mid,POINT(1997697.0065920444 8257643.750388128) +SUB,Juanda Int'l,5,major,POINT(12554282.06428334 -824220.7126577058) +MDQ,Astor Piazzolla Int'l,5,mid,POINT(-6409956.071233664 -4569995.781535456) +GCM,Owen Roberts 
Int'l,5,major,POINT(-9056694.465125477 2189805.779300135) +CGO,Zhengzhou Xinzheng Int'l,5,major,POINT(12672814.691606456 4099692.461673756) +DLC,Dalian Zhoushuizi Int'l,5,major,POINT(13529649.993568424 4716168.331747865) +HER,Heraklion Int'l,5,major,POINT(2802363.0755655468 4209759.612251141) +TBS,Tbilisi Int'l,5,major,POINT(5005438.002564791 5111591.3559101885) +XXC,Cascais,5,mid,POINT(-1041347.3496455223 4682147.39413741) +KHH,Kaohsiung Int'l,4,major,POINT(13396761.523445146 2580305.254992591) +SKO,Sadiq Abubakar III,4,mid,POINT(578886.5281774073 1450306.3955330413) +UIO,Mariscal Sucre Int'l,4,mid,POINT(-8737466.003542578 -16202.837421707412) +KHI,Karachi Civil,4,mid,POINT(7475340.734641231 2863285.7669967795) +KIV,Kishinev S.E.,4,mid,POINT(3221146.21311236 5931334.246820844) +LIM,Jorge Chávez,4,major,POINT(-8583574.949268589 -1348407.577723065) +YQT,Thunder Bay Int'l,4,mid,POINT(-9942182.182879409 6168947.109715085) +VNO,Vilnius,4,major,POINT(2814236.4820729117 7292896.273539033) +XIY,Hsien Yang,4,major,POINT(12106641.539417576 4088434.13068678) +NTR,Del Norte Int'l,4,mid,POINT(-11158486.998785185 2981736.0350810615) +TBU,Fua'amotu Int'l,4,mid,POINT(-19496009.707956623 -2421547.4825201) +IFN,Esfahan Int'l,4,mid,POINT(5774853.506151178 3861648.6978803067) +HRE,Harare Int'l,4,mid,POINT(3462192.0109579186 -2028514.3953444536) +KWI,Kuwait Int'l,4,major,POINT(5340161.011103197 3406187.448098151) +YOW,Macdonald-Cartier Int'l,4,major,POINT(-8422977.402770454 5672061.751251567) +KBL,Kabul Int'l,4,mid,POINT(7704430.153932341 4104705.8192228535) +ABJ,Abidjan Port Bouet,4,mid,POINT(-437732.64923689933 585738.5549131387) +ACA,General Juan N Alvarez Int'l,4,major,POINT(-11104621.097451538 1893134.144431517) +ACC,Kotoka Int'l,4,major,POINT(-19080.478612675117 625164.9555515812) +ADD,Bole Int'l,4,mid,POINT(4318438.20565262 1003961.98704289) +ADE,Aden Int'l,4,mid,POINT(5012783.684754567 1440065.6190378706) +ADL,Adelaide Int'l,4,mid,POINT(15421322.992793165 -4155809.950948155) 
+ALA,Almaty Int'l,4,major,POINT(8572941.731988944 5364861.409593734) +ALG,Houari Boumediene,4,major,POINT(357566.3903251246 4397333.961514649) +ALP,Aleppo Int'l,4,major,POINT(4144128.6888790904 4326055.140346658) +AMD,Sardar Vallabhbhai Patel Int'l,4,mid,POINT(8084121.61879358 2640576.3496694164) +ANF,Cerro Moreno Int'l,4,mid,POINT(-7841455.23250036 -2686403.1434665117) +ASB,Ashkhabad Northwest,4,mid,POINT(6497046.966982166 4577286.332697623) +ASM,Yohannes Iv Int'l,4,mid,POINT(4331035.517420361 1723061.729802367) +ASU,Silvio Pettirossi Int'l,4,mid,POINT(-6402418.9325783895 -2905456.3343431936) +BDA,Bermuda Int'l,4,mid,POINT(-7202679.862234498 3810550.6844580285) +BEG,Surcin,4,major,POINT(2258815.4686191813 5593083.638402032) +BEY,Beirut Int'l,4,major,POINT(3951072.18915956 4005386.9057885306) +BHO,Bairagarh,4,mid,POINT(8609546.429699581 2666590.0416751173) +BKO,Bamako Sénou,4,mid,POINT(-884686.3022593991 1407150.750920686) +BNA,Nashville Int'l,4,major,POINT(-9647980.866827387 4318728.98384897) +BNE,Brisbane Int'l,4,major,POINT(17045268.97468119 -3171705.026698056) +BOI,Boise Air Terminal,4,major,POINT(-12937756.167030284 5398978.142473312) +BRW,Wiley Post Will Rogers Mem.,4,mid,POINT(-17451760.842927106 11501923.229607187) +BUF,Greater Buffalo Int'l,4,major,POINT(-8764405.765343236 5301936.494910232) +BUQ,Bulawayo,4,mid,POINT(3186247.9186200458 -2274875.31003813) +BWN,Brunei Int'l,4,major,POINT(12794296.285614245 551212.6610358991) +CAN,Guangzhou Baiyun Int'l,4,major,POINT(12612221.85073016 2679148.220264759) +CCP,Carriel Sur Int'l,4,mid,POINT(-8133236.455642944 -4407981.701992263) +CCU,Netaji Subhash Chandra Bose Int'l,4,major,POINT(9845095.878525952 2589190.441204464) +CGP,Chittagong,4,mid,POINT(10220766.844263365 2541033.9946976844) +CHC,Christchurch Int'l,4,major,POINT(19206917.506066456 -5386632.063747829) +CKY,Conakry,4,mid,POINT(-1516290.089455364 1070787.8551788528) +CLE,Hopkins Int'l,4,major,POINT(-9110213.535620524 5073182.834634756) +CLO,Alfonso Bonilla 
Aragón Int'l,4,mid,POINT(-8503147.260560917 394688.4893136592) +COO,Cotonou Cadjehon,4,mid,POINT(265363.4102164551 709253.9879953585) +COR,Ingeniero Ambrosio L.V. Taravella Int'l,4,mid,POINT(-7148082.293848249 -3673814.5723595982) +CTG,Rafael Nunez,4,mid,POINT(-8405994.675909493 1169219.3223426025) +CUN,Cancún,4,major,POINT(-9670815.8913069 2396672.2574249594) +CUU,General R F Villalobos Int'l,4,mid,POINT(-11796437.906151637 3338025.3503014464) +DAC,Zia Int'l Dhaka,4,mid,POINT(10063830.122687222 2734912.287256658) +DUR,Louis Botha,4,mid,POINT(3444860.4784031203 -3499169.190276895) +FBM,Lubumbashi Luano Int'l,4,mid,POINT(3064536.525946187 -1299173.7321592586) +FEZ,Saiss,4,mid,POINT(-554609.9977210398 4019477.066397836) +FIH,Kinshasa N Djili Int'l,4,mid,POINT(1719498.3187440601 -489078.6235351551) +FNA,Freetown Lungi,4,mid,POINT(-1469442.8461813862 962699.2713895586) +FNJ,Sunan,4,mid,POINT(13990112.802587004 4750385.930860793) +FRU,Vasilyevka,4,major,POINT(8289828.933824627 5320416.129324652) +GBE,Sir Seretse Khama Int'l,4,mid,POINT(2885888.8727273894 -2821560.5498353736) +GDL,Don Miguel Hidalgo Int'l,4,major,POINT(-11499388.694471747 2335291.916174427) +GLA,Glasgow Int'l,4,major,POINT(-493332.13497472147 7531425.677465869) +GUA,La Aurora,4,mid,POINT(-10077773.662741432 1641782.3913222759) +GYE,Simon Bolivar Int'l,4,mid,POINT(-8892981.234559957 -240321.9210915182) +HAN,Noi Bai,4,major,POINT(11778020.624519257 2417481.0019598627) +HAV,José Martí Int'l,4,major,POINT(-9173552.10200181 2631698.5715378257) +HBE,Borg El Arab Int'l,4,mid,POINT(3305372.4611101435 3622152.6313410117) +JED,King Abdul Aziz Int'l,4,major,POINT(4358213.688462168 2472032.7516484386) +KAN,Kano Mallam Aminu Int'l,4,mid,POINT(948679.9717885901 1350910.618957436) +KHG,Kashi,4,mid,POINT(8461730.10186632 4799026.553333653) +KIN,Norman Manley Int'l,4,major,POINT(-8546964.648042452 2030243.1181430449) +KTM,Tribhuvan Int'l,4,mid,POINT(9501913.308235684 3211238.4485011543) +LAD,Luanda 4 de 
Fevereiro,4,mid,POINT(1473290.7236730263 -988928.4553551358) +LED,Pulkovo 2,4,major,POINT(3373770.677315162 8356540.564011781) +LHE,Allama Iqbal Int'l,4,mid,POINT(8283381.384422085 3700549.0056644054) +LLW,Kamuzu Int'l,4,mid,POINT(3760682.8136171373 -1549976.8890777947) +LOS,Lagos Murtala Muhammed,4,major,POINT(369705.8718163213 733902.6348314694) +LPB,El Alto Int'l,4,mid,POINT(-7589540.858655074 -1863845.88440594) +LUN,Lusaka Int'l,4,mid,POINT(3166543.5091625582 -1726897.713806871) +LXR,Luxor,4,mid,POINT(3640514.3787480188 2958640.933926492) +MAA,Chennai Int'l,4,major,POINT(8923790.721397618 1457736.4547069867) +MAR,La Chinita Int'l,4,mid,POINT(-7984253.421644532 1181766.0034481264) +MDE,José María Córdova,4,mid,POINT(-8396490.305061044 688284.7554102064) +MEM,Memphis Int'l,4,major,POINT(-10016709.013641983 4169917.9414620516) +MGA,Augusto Cesar Sandino Int'l,4,mid,POINT(-9592543.525229517 1362173.336649747) +MHD,Mashhad,4,major,POINT(6639338.705659545 4331977.286739136) +MIA,Miami Int'l,4,major,POINT(-8936614.265272027 2973705.5605749027) +MID,Lic M Crecencio Rejon Int'l,4,mid,POINT(-9981242.127203269 2383993.8907446656) +MLA,Luqa,4,major,POINT(1613605.4581007103 4279854.343021202) +MBA,Moi Int'l,4,major,POINT(4408548.300038625 -449283.9321265271) +MSU,Moshoeshoe I Int'l,4,mid,POINT(3067877.8954956117 -3433759.170427422) +MSY,New Orleans Int'l,4,major,POINT(-10047329.210982308 3501636.5492488067) +MVD,Carrasco Int'l,4,major,POINT(-6236856.606664128 -4142301.2089914605) +MZT,General Rafael Buelna Int'l,4,mid,POINT(-11829924.136495678 2652178.0890191356) +NAS,Nassau Int'l,4,major,POINT(-8623347.347921519 2881726.410002533) +NDJ,Ndjamena,4,mid,POINT(1673470.874236594 1360454.4521161444) +NIM,Niamey,4,mid,POINT(242376.6744629448 1514253.6866089322) +CEB,Mactan-Cebu Int'l,4,major,POINT(13801294.122495249 1154613.0887848875) +NOV,Nova Lisboa,4,mid,POINT(1753255.46880754 -1437182.0224418598) +OMA,Eppley Airfield,4,mid,POINT(-10675474.133706558 5056650.0074564) 
+OME,Nome,4,mid,POINT(-18416879.31141433 9479752.238161596) +OUA,Ouagadougou,4,mid,POINT(-168516.04201913223 1385974.7608973612) +PAP,Mais Gate Int'l,4,mid,POINT(-8047784.48102509 2105042.584124078) +PBC,Puebla,4,mid,POINT(-10951139.486271285 2174229.474607877) +PDX,Portland Int'l,4,major,POINT(-13646961.267213944 5714722.187812213) +PER,Perth Int'l,4,major,POINT(12910191.665713944 -3755585.4761783686) +PLZ,H F Verwoerd,4,mid,POINT(2851090.0581930364 -4026665.5988787916) +PMC,El Tepual Int'l,4,mid,POINT(-8137274.89956818 -5076477.117562315) +PNH,Pochentong,4,major,POINT(11671295.08602542 1294838.2054215844) +POM,Port Moresby Int'l,4,major,POINT(16387481.484328216 -1055490.819211655) +PTY,Tocumen Int'l,4,major,POINT(-8837335.423871214 1013558.7581657403) +PUQ,Carlos Ibáñez de Campo Int'l,4,mid,POINT(-7886220.465965787 -6983935.755957038) +RDU,Durham Int'l,4,major,POINT(-8771016.456421759 4283467.088130306) +RGN,Mingaladon,4,major,POINT(10701609.591973055 1909321.845834955) +RIX,Riga,4,major,POINT(2669372.2722532493 7744193.62308274) +SAH,Sanaa Int'l,4,mid,POINT(4923065.16120369 1743876.971600611) +SDA,Baghdad Int'l,4,major,POINT(4923540.021769185 3930952.448150425) +SDQ,De Las Américas Int'l,4,major,POINT(-7756349.458504393 2087967.005676945) +SGN,Tan Son Nhat,4,major,POINT(11873809.566306261 1211281.1826867384) +SKG,Thessaloniki,4,major,POINT(2557725.0846379087 4942364.782177572) +SOF,Vrazhdebna,4,major,POINT(2605149.055067963 5264781.530419111) +STV,Surat,4,major,POINT(8097651.205896711 2406246.8115825886) +SUV,Nausori Int'l,4,mid,POINT(19877213.660566133 -2042921.722827706) +SYZ,Shiraz Int'l,4,major,POINT(5854266.559214414 3445299.4359969352) +TAM,Gen Francisco J Mina Int'l,4,mid,POINT(-10894817.831443883 2546298.6230297326) +TGU,Toncontin Int'l,4,mid,POINT(-9709198.226587674 1581099.3950811415) +THR,Mehrabad Int'l,4,major,POINT(5713006.099195896 4258237.6324800905) +TIA,Tirane Rinas,4,major,POINT(2194667.3682943927 5074616.749494774) +TIJ,General Abelardo L 
Rodriguez Int'l,4,major,POINT(-13021650.434262715 3835203.5362474006) +TLC,Jose Maria Morelos Y Pavon,4,mid,POINT(-11084153.994323695 2194851.812577192) +TLL,Ulemiste,4,major,POINT(2760608.141526787 8270958.623677219) +TLV,Ben Gurion,4,major,POINT(3881805.256411502 3763408.663236236) +TMS,São Tomé Salazar,4,mid,POINT(747267.9190398284 41716.632476319064) +TNR,Antananarivo Ivato,4,mid,POINT(5284932.232617036 -2131325.033802293) +TPA,Tampa Int'l,4,major,POINT(-9187734.608451089 3246457.5340377474) +VLN,Zim Valencia,4,mid,POINT(-7561082.719277231 1136302.428570875) +VOG,Gumrak,4,mid,POINT(4937550.184505996 6239586.890587481) +VTE,Vientiane,4,mid,POINT(11417844.047511633 2034676.3332973232) +VVI,Viru Viru Int'l,4,mid,POINT(-7028755.9321344495 -1996382.161586104) +WLG,Wellington Int'l,4,major,POINT(19459945.56238444 -5060989.272103053) +YPR,Prince Rupert,4,mid,POINT(-14521136.321069665 7225652.708574609) +YQG,Windsor,4,mid,POINT(-9235074.723264633 5200890.330909885) +YQR,Regina,4,mid,POINT(-11650189.618887361 6521642.58581496) +YVR,Vancouver Int'l,4,major,POINT(-13712431.390338305 6307768.359333472) +YWG,Winnipeg Int'l,4,major,POINT(-10823234.470095292 6429551.143631333) +YXE,John G Diefenbaker Int'l,4,mid,POINT(-11876696.72926981 6830948.321270529) +YXY,Whitehorse Int'l,4,mid,POINT(-15036614.925437473 8560481.626639808) +YYC,Calgary Int'l,4,major,POINT(-12691597.53994198 6644473.5798023585) +YYG,Charlottetown,4,mid,POINT(-7027736.836872493 5826269.873133451) +YYQ,Churchill,4,mid,POINT(-10473089.528120976 8126488.101793833) +YYT,St John's Int'l,4,mid,POINT(-5871361.054995282 6042731.093369723) +YZF,Yellowknife,4,mid,POINT(-12739162.781496543 8971635.068532733) +ZAG,Zagreb,4,major,POINT(1787959.5376854797 5737717.133504335) +ZNZ,Zanzibar,4,mid,POINT(4366210.024200951 -693611.1847225152) +REK,Reykjavik Air Terminal,4,mid,POINT(-2443088.1663828716 9383331.11811138) +ARH,Arkhangelsk-Talagi,4,mid,POINT(4532189.008602291 9502944.831155792) +KZN,Kazan 
Int'l,4,major,POINT(5487877.883761077 7480783.907934758) +ORY,Paris Orly,4,major,POINT(263535.4390255202 6229391.517204392) +YQB,Québec,4,major,POINT(-7946422.523706657 5908118.849419109) +YUL,Montréal-Trudeau,4,major,POINT(-8209736.33297653 5693970.45509647) +NRT,Narita Int'l,4,major,POINT(15627520.11358417 4268204.230262293) +NGO,Chubu Centrair Int'l,4,major,POINT(15230150.672647433 4144740.3065854395) +OKD,Okadama,4,mid,POINT(15738583.429385802 5328829.038063646) +BGO,Bergen Flesland,4,major,POINT(581895.1550787452 8464377.255090103) +TOS,Tromsø Langnes,4,major,POINT(2104746.825914806 10965250.808064152) +BEL,Val de Caes Int'l,4,mid,POINT(-5396719.96529551 -154721.0211878832) +CGR,Campo Grande Int'l,4,mid,POINT(-6085719.662636482 -2327280.4482070403) +CWB,Afonso Pena Int'l,4,mid,POINT(-5473992.287078164 -2941725.146308365) +FOR,Pinto Martins Int'l,4,mid,POINT(-4290336.3586437125 -420936.50489259965) +GRU,São Paulo-Guarulhos Int'l,4,major,POINT(-5174325.1429146975 -2683632.0960700875) +GYN,Santa Genoveva,4,mid,POINT(-5479885.220794013 -1878071.977261713) +POA,Salgado Filho Int'l,4,mid,POINT(-5697002.13872884 -3502289.309565192) +REC,Gilberto Freyre Int'l,4,mid,POINT(-3887083.6703741536 -908262.6386146299) +SSA,Deputado Luis Eduardo Magalhaes Int'l,4,mid,POINT(-4267410.303359899 -1449949.9581741774) +MDZ,El Plumerillo,4,mid,POINT(-7658612.188356534 -3872469.558067387) +MAO,Eduardo Gomes Int'l,4,mid,POINT(-6684297.334286057 -337693.8319007795) +NSI,Yaoundé Nsimalen Int'l,4,mid,POINT(1285516.8273129629 413824.7139555897) +PVG,Shanghai Pudong Int'l,4,major,POINT(13558883.0462749 3652545.2074926775) +ADJ,Marka Int'l,4,mid,POINT(4005732.2715503518 3759924.3739497215) +MLE,Male Int'l,4,major,POINT(8185031.645738038 466699.95605840435) +VER,Gen. 
Heriberto Jara Int'l,4,mid,POINT(-10707106.058942689 2171711.1956018056) +OXB,Osvaldo Vieira Int'l,4,mid,POINT(-1742282.0070356948 1333069.743587473) +DVO,Francisco Bangoy Int'l,4,major,POINT(13986744.835649133 795824.7667777662) +SEZ,Seychelles Int'l,4,mid,POINT(6179517.693399666 -520557.99872006755) +DKR,Léopold Sedar Senghor Int'l,4,major,POINT(-1947023.3020539244 1659902.3057268695) +PZU,Port Sudan New Int'l,4,mid,POINT(4145398.4204000095 2206112.046227745) +TAS,Tashkent Int'l,4,major,POINT(7710724.168742622 5051098.386031781) +CPH,Copenhagen,3,major,POINT(1408119.2975413958 7484813.53657096) +BBU,Aeroportul National Bucuresti-Baneasa,3,mid,POINT(2903849.641648274 5542685.465268317) +BUD,Ferihegy,3,major,POINT(2144261.6538210036 6013084.452825525) +CKG,Chongqing Jiangbei Int'l,3,major,POINT(11870890.052742634 3468127.1353697367) +CLT,Douglas Int'l,3,major,POINT(-9010636.818187172 4193876.9457937614) +DTW,Detroit Metro,3,major,POINT(-9278245.06384242 5194851.4716006685) +DUB,Dublin,3,major,POINT(-695066.0887943477 7062376.814087123) +FAI,Fairbanks Int'l,3,major,POINT(-16460336.780947097 9560620.05078907) +HAM,Hamburg,3,major,POINT(1113823.6215953932 7100767.499507231) +KUL,Kuala Lumpur Int'l,3,major,POINT(11322738.032386623 305969.0287561493) +LAS,Mccarran Int'l,3,major,POINT(-12818586.746428112 4312318.07591977) +MCO,Orlando Int'l,3,major,POINT(-9051095.174284099 3303441.5793951172) +MSP,Minneapolis St. 
Paul Int'l,3,major,POINT(-10375878.27120046 5602968.032960929) +MUC,Franz-Josef-Strauss,3,major,POINT(1312241.1393453805 6165923.947863139) +PHL,Philadelphia Int'l,3,major,POINT(-8376010.861425873 4847951.919079063) +PHX,Sky Harbor Int'l,3,major,POINT(-12469300.843246758 3953301.1263070833) +SLC,Salt Lake City Int'l,3,major,POINT(-12465777.534860881 4980934.952813189) +STL,Lambert St Louis Int'l,3,major,POINT(-10059492.043891825 4684884.61245935) +WAW,Okecie Int'l,3,major,POINT(2334673.2165327673 6831108.554968304) +ZRH,Zurich Int'l,3,major,POINT(953141.1684417519 6016230.716705278) +CRL,Gosselies,3,mid,POINT(495858.6042741584 6525822.268641248) +MUCf,Munich Freight Terminal,3,major,POINT(1310172.8388047176 6165247.042460089) +BCN,Barcelona,3,major,POINT(231322.27476789098 5057160.357546904) +PRG,Ruzyn,3,major,POINT(1588249.1634762501 6464940.042104144) +HKG,Hong Kong Int'l,2,major,POINT(12683188.007765977 2549427.0516792163) +TPE,Taoyuan,2,major,POINT(13495414.427034626 2885173.4874935) +AMS,Schiphol,2,major,POINT(530368.0140539851 6856178.306602677) +SIN,Singapore Changi,2,major,POINT(11575714.642650735 150981.23199442241) +LHR,London Heathrow,2,major,POINT(-50445.167757272204 6705034.149615074) +AKL,Auckland Int'l,2,major,POINT(19457725.202241894 -4439992.645248473) +ANC,Anchorage Int'l,2,major,POINT(-16695889.266499205 8665630.66786446) +ATL,Hartsfield-Jackson Atlanta Int'l,2,major,POINT(-9398192.25232862 3980635.393529892) +PEK,Beijing Capital,2,major,POINT(12978536.162716437 4877394.999286327) +BOG,Eldorado Int'l,2,major,POINT(-8253602.394483802 523658.9916146796) +BOM,Chhatrapati Shivaji Int'l,2,major,POINT(8112359.3506964175 2166135.8344901367) +BOS,Gen E L Logan Int'l,2,major,POINT(-7905510.222605432 5216048.369740364) +BWI,Baltimore-Washington Int'l Thurgood Marshall,2,major,POINT(-8534714.280230813 4747400.94881119) +CAI,Cairo Int'l,2,major,POINT(3495403.4025467657 3517953.30796221) +CAS,Casablanca-Anfa,2,mid,POINT(-853065.6155118733 3970245.1886197724) 
+CCS,Simón Bolivar Int'l,2,mid,POINT(-7459045.837487549 1186476.4148235403) +CPT,Cape Town Int'l,2,major,POINT(2070281.6524537024 -4024834.418162735) +CTU,Chengdushuang Liu,2,major,POINT(11572344.177978337 3578461.952264905) +DEL,Indira Gandhi Int'l,2,major,POINT(8581378.678432528 3319661.314917636) +DEN,Denver Int'l,2,major,POINT(-11652233.819124809 4844090.472465843) +DFW,Dallas-Ft. Worth Int'l,2,major,POINT(-10802484.69436073 3882058.107395093) +DMK,Don Muang Int'l,2,major,POINT(11199027.825192804 1565071.6743661466) +DXB,Dubai Int'l,2,major,POINT(6161987.655757107 2906798.64459153) +EWR,Newark Int'l,2,major,POINT(-8257362.26367059 4966794.150215097) +EZE,Ministro Pistarini Int'l,2,major,POINT(-6516781.661053119 -4138585.256335913) +FLL,Fort Lauderdale Hollywood Int'l,2,major,POINT(-8921729.404289588 3007965.195522777) +IAH,George Bush Intercontinental,2,major,POINT(-10612499.47280319 3501826.345595435) +IST,Atatürk Hava Limani,2,major,POINT(3208177.553946064 5009073.443849705) +JNB,OR Tambo Int'l,2,major,POINT(3142770.591123307 -3015450.744144327) +JNU,Juneau Int'l,2,mid,POINT(-14981774.819715252 8043101.512335137) +LAX,Los Angeles Int'l,2,major,POINT(-13180502.507488059 4021308.4533828353) +LIN,Linate,2,major,POINT(1033041.1227508297 5694294.632713835) +MEL,Melbourne Int'l,2,major,POINT(16124516.709420556 -4532903.928433404) +MEX,Lic Benito Juarez Int'l,2,major,POINT(-11029825.463622067 2206273.099845724) +MNL,Ninoy Aquino Int'l,2,major,POINT(13470117.254215317 1632429.0789827209) +NBO,Jomo Kenyatta Int'l,2,major,POINT(4110482.0770391775 -148127.1963503032) +HNL,Honolulu Int'l,2,major,POINT(-17579549.84908658 2431513.0220979755) +ORD,Chicago O'Hare Int'l,2,major,POINT(-9785554.79848144 5157464.265554985) +RUH,King Khalid Int'l,2,major,POINT(5198823.826006537 2870713.4354381226) +SCL,Arturo Merino Benitez Int'l,2,major,POINT(-7880717.078713118 -3948096.228258569) +SEA,Tacoma Int'l,2,major,POINT(-13614628.614807313 6014780.892744967) +SFO,San Francisco 
Int'l,2,major,POINT(-13623665.600257758 4525464.67765373) +SHA,Hongqiao,2,major,POINT(13507638.791596299 3657092.0385201345) +SVO,Sheremtyevo,2,major,POINT(4165126.6204954362 7551739.134614029) +YYZ,Toronto-Pearson Int'l,2,major,POINT(-8862302.66056038 5416201.950167791) +SYD,Kingsford Smith,2,major,POINT(16827729.63768122 -4020289.8753952323) +HEL,Helsinki Vantaa,2,major,POINT(2779448.185729562 8471040.888425522) +CDG,Charles de Gaulle Int'l,2,major,POINT(282959.42553019867 6277308.537046448) +TXL,Berlin-Tegel Int'l,2,major,POINT(1479470.4406631375 6901000.847325224) +VIE,Vienna Schwechat Int'l,2,major,POINT(1843536.256921738 6126801.191600173) +FRA,Frankfurt Int'l,2,major,POINT(954210.9569556866 6455056.850128017) +FCO,Leonardo da Vinci Int'l,2,major,POINT(1363674.994060762 5130332.471894705) +ITM,Osaka Int'l,2,major,POINT(15077387.377306195 4135406.3270045333) +GMP,Gimpo Int'l,2,major,POINT(14115577.804574551 4517074.614662222) +OSL,Oslo Gardermoen,2,major,POINT(1235546.524975006 8442962.64811417) +BSB,Juscelino Kubitschek Int'l,2,major,POINT(-5334517.775719667 -1789672.7465510531) +CGH,Congonhas Int'l,2,major,POINT(-5194068.981688568 -2708004.705806188) +GIG,Rio de Janeiro-Antonio Carlos Jobim Int'l,2,major,POINT(-4814387.792751178 -2609340.5039402605) +MAD,Madrid Barajas,2,major,POINT(-397302.229816427 4934204.428034675) +SJU,Luis Muñoz Marin,2,major,POINT(-7347557.271103147 2088888.9944195782) +ARN,Arlanda,2,major,POINT(1996039.722208033 8322469.9470024165) +CGK,Soekarno-Hatta Int'l,2,major,POINT(11872701.938463464 -683313.7212381957) +ATH,Eleftherios Venizelos Int'l,2,major,POINT(2665780.7652553665 4570421.605041616) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json new file mode 100644 index 0000000000000..78f85112bd516 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports.json @@ -0,0 +1,19 @@ +{ + 
"properties": { + "abbrev": { + "type": "keyword" + }, + "name": { + "type": "text" + }, + "scalerank": { + "type": "integer" + }, + "type": { + "type": "keyword" + }, + "location": { + "type": "geo_point" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_web.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_web.json new file mode 100644 index 0000000000000..5dd7c1266148c --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-airports_web.json @@ -0,0 +1,19 @@ +{ + "properties": { + "abbrev": { + "type": "keyword" + }, + "name": { + "type": "text" + }, + "scalerank": { + "type": "integer" + }, + "type": { + "type": "keyword" + }, + "location": { + "type": "point" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 8fe3f7c9eccf3..b23e4d87fe52f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -47,7 +47,7 @@ median_absolute_deviation|? median_absolute_deviation(arg1:?) min |? min(arg1:?) |arg1 |? | "" |? | "" | false | false mv_avg |? mv_avg(arg1:?) |arg1 |? | "" |? | "" | false | false mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false -mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." 
| false | false +mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false mv_dedupe |"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" |v | "boolean|date|double|ip|text|integer|keyword|version|long" | "" |? | "Remove duplicate values from a multivalued field." | false | false mv_max |"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false mv_median |? mv_median(arg1:?) |arg1 |? | "" |? | "" | false | false @@ -73,18 +73,20 @@ tanh |"double tanh(n:integer|long|double|unsigned_long)" tau |? tau() | null | null | null |? | "" | null | false to_bool |? to_bool(arg1:?) |arg1 |? | "" |? | "" | false | false to_boolean |? to_boolean(arg1:?) |arg1 |? | "" |? | "" | false | false +to_cartesianpoint |? to_cartesianpoint(arg1:?) |arg1 |? | "" |? | "" | false | false to_datetime |? to_datetime(arg1:?) |arg1 |? | "" |? | "" | false | false to_dbl |? to_dbl(arg1:?) |arg1 |? | "" |? | "" | false | false to_degrees |? to_degrees(arg1:?) |arg1 |? | "" |? | "" | false | false to_double |? to_double(arg1:?) |arg1 |? | "" |? | "" | false | false to_dt |? to_dt(arg1:?) |arg1 |? | "" |? | "" | false | false +to_geopoint |? to_geopoint(arg1:?) |arg1 |? | "" |? | "" | false | false to_int |? to_int(arg1:?) |arg1 |? | "" |? | "" | false | false to_integer |? to_integer(arg1:?) |arg1 |? | "" |? | "" | false | false to_ip |? to_ip(arg1:?) |arg1 |? | "" |? | "" | false | false to_long |? to_long(arg1:?) |arg1 |? | "" |? | "" | false | false to_radians |? 
to_radians(arg1:?) |arg1 |? | "" |? | "" | false | false -to_str |"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "" | false | false -to_string |"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "" | false | false +to_str |"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false +to_string |"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false to_ul |? to_ul(arg1:?) |arg1 |? | "" |? | "" | false | false to_ulong |? to_ulong(arg1:?) |arg1 |? | "" |? | "" | false | false to_unsigned_long |? to_unsigned_long(arg1:?) |arg1 |? | "" |? | "" | false | false @@ -136,7 +138,7 @@ synopsis:keyword ? min(arg1:?) ? mv_avg(arg1:?) "keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" +"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" "? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" "? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" ? mv_median(arg1:?) @@ -162,18 +164,20 @@ synopsis:keyword ? tau() ? to_bool(arg1:?) ? to_boolean(arg1:?) +? to_cartesianpoint(arg1:?) ? to_datetime(arg1:?) ? to_dbl(arg1:?) ? to_degrees(arg1:?) ? to_double(arg1:?) ? to_dt(arg1:?) +? to_geopoint(arg1:?) ? to_int(arg1:?) ? to_integer(arg1:?) ? to_ip(arg1:?) ? 
to_long(arg1:?) ? to_radians(arg1:?) -"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" +"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" ? to_ul(arg1:?) ? to_ulong(arg1:?) ? to_unsigned_long(arg1:?) @@ -204,5 +208,5 @@ countFunctions#[skip:-8.11.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -82 | 82 | 82 +84 | 84 | 84 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec new file mode 100644 index 0000000000000..0ec7c2d6f84e8 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -0,0 +1,98 @@ +convertFromLong#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +row long = 1512146573982606908 +| eval pt = to_geopoint(long); + +long:long |pt:geo_point +1512146573982606908 |POINT(42.97109630194 14.7552534413725) +; + +convertFromString#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +row wkt = "POINT(42.97109630194 14.7552534413725)" +| eval pt = to_geopoint(wkt); + +wkt:keyword |pt:geo_point +"POINT(42.97109630194 14.7552534413725)" |POINT(42.97109630194 14.7552534413725) +; + +convertFromLongArray#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +row long = [1512146573982606908, 2329183180959557901] +| eval pt = to_geopoint(long); + +long:long |pt:geo_point +[1512146573982606908, 2329183180959557901] |[POINT(42.97109630194 14.7552534413725), POINT(75.8092915005895 22.727749187571)] +; + +convertFromStringArray#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +row wkt = ["POINT(42.97109630194 
14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] +| eval pt = to_geopoint(wkt); + +wkt:keyword |pt:geo_point +["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] |[POINT(42.97109630194 14.7552534413725), POINT(75.8092915005895 22.727749187571)] +; + +simpleLoad#[skip:-8.11.99, reason:spatial type geo_point only added in 8.12] +FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; + +abbrev:keyword | location:geo_point | name:text | scalerank:i | type:k +CJJ | POINT(127.49591611325741 36.72202274668962) | Cheongju Int'l | 9 | major +HOD | POINT(42.97109629958868 14.7552534006536) | Hodeidah Int'l | 9 | mid +IDR | POINT(75.80929149873555 22.72774917539209) | Devi Ahilyabai Holkar Int'l | 9 | mid +IXC | POINT(76.80172610096633 30.6707248929888) | Chandigarh Int'l | 9 | [major, military] +LYP | POINT(72.98781909048557 31.362743536010385) | Faisalabad Int'l | 9 | [mid, military] +MLG | POINT(112.71141858771443 -7.9299800377339125)| Abdul Rachman Saleh | 9 | [mid, military] +OMS | POINT(73.3163595199585 54.95764828752726) | Omsk Tsentralny | 9 | mid +OVB | POINT(82.6671524439007 55.00958469696343) | Novosibirsk Tolmachev | 9 | mid +OZH | POINT(35.301872827112675 47.87326351739466) | Zaporozhye Int'l | 9 | [mid, military] +TRZ | POINT(78.7089578434825 10.760357128456235) | Tiruchirappalli | 9 | mid +WIIT | POINT(105.17606039531529 -5.242566782981157) | Radin Inten II | 9 | mid +ZAH | POINT(60.9007085300982 29.47529417462647) | Zahedan Int'l | 9 | mid +; + +convertCartesianFromLong#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +row long = 5009771769843126025 +| eval pt = to_cartesianpoint(long); + +long:long |pt:cartesian_point +5009771769843126025 |POINT(4297.11 -1475.53) +; + +convertCartesianFromString#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] +| mv_expand wkt +| eval pt = 
to_cartesianpoint(wkt) +| eval l = to_long(pt); + +wkt:keyword |pt:cartesian_point |l:long +"POINT(4297.11 -1475.53)" |POINT(4297.11 -1475.53) |5009771769843126025 +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) |5038656556796611666 +; + +convertCartesianFromLongArray#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +row long = [5009771769843126025, 5038656556796611666] +| eval pt = to_cartesianpoint(long); + +long:long |pt:cartesian_point +[5009771769843126025, 5038656556796611666] |[POINT(4297.11 -1475.53), POINT(7580.93 2272.77)] +; + +convertCartesianFromStringArray#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] +| eval pt = to_cartesianpoint(wkt); + +wkt:keyword |pt:cartesian_point +["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] |[POINT(4297.11 -1475.53), POINT(7580.93 2272.77)] +; + +simpleCartesianLoad#[skip:-8.11.99, reason:spatial type cartesian_point only added in 8.12] +FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +CJJ | POINT (14192780.0 4400431.0) | Cheongju Int'l | 9 | major +HOD | POINT (4783520.5 1661010.0) | Hodeidah Int'l | 9 | mid +IDR | POINT (8439052.0 2599127.5) | Devi Ahilyabai Holkar Int'l | 9 | mid +OMS | POINT (8161540.0 7353651.0) | Omsk Tsentralny | 9 | mid +OVB | POINT (9202465.0 7363726.5) | Novosibirsk Tolmachev | 9 | mid +TRZ | POINT (8761841.0 1204941.5) | Tiruchirappalli | 9 | mid +WIIT | POINT (11708145.0 -584415.9375) | Radin Inten II | 9 | mid +ZAH | POINT (6779436.0 3436280.5) | Zahedan Int'l | 9 | mid +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java new file mode 100644 index 0000000000000..7fffc3845b0e9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointFromStringEvaluator.java @@ -0,0 +1,126 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianPoint}. + * This class is generated. Do not edit it. 
+ */ +public final class ToCartesianPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToCartesianPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToCartesianPointFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianPoint.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i, 
scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianPoint.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToCartesianPointFromStringEvaluator get(DriverContext context) { + return new ToCartesianPointFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToCartesianPointFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java new file mode 100644 index 0000000000000..de4e66f55ae66 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointFromStringEvaluator.java @@ -0,0 +1,126 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoPoint}. + * This class is generated. Do not edit it. + */ +public final class ToGeoPointFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToGeoPointFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToGeoPointFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendLong(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); 
+ builder.appendNull(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoPoint.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + long value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static long evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoPoint.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToGeoPointFromStringEvaluator get(DriverContext context) { + return new ToGeoPointFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + 
return "ToGeoPointFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java new file mode 100644 index 0000000000000..58544ddccb682 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianPointEvaluator.java @@ -0,0 +1,109 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. + * This class is generated. Do not edit it. 
+ */ +public final class ToStringFromCartesianPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromCartesianPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToStringFromCartesianPoint"; + } + + @Override + public Block evalVector(Vector v) { + LongVector vector = (LongVector) v; + int positionCount = v.getPositionCount(); + if (vector.isConstant()) { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + builder.appendBytesRef(evalValue(vector, p)); + } + return builder.build(); + } + } + + private static BytesRef evalValue(LongVector container, int index) { + long value = container.getLong(index); + return ToString.fromCartesianPoint(value); + } + + @Override + public Block evalBlock(Block b) { + LongBlock block = (LongBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + BytesRef value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(LongBlock container, int 
index) { + long value = container.getLong(index); + return ToString.fromCartesianPoint(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToStringFromCartesianPointEvaluator get(DriverContext context) { + return new ToStringFromCartesianPointEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToStringFromCartesianPointEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java new file mode 100644 index 0000000000000..9d2b514ff2482 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoPointEvaluator.java @@ -0,0 +1,109 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. + * This class is generated. Do not edit it. + */ +public final class ToStringFromGeoPointEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromGeoPointEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToStringFromGeoPoint"; + } + + @Override + public Block evalVector(Vector v) { + LongVector vector = (LongVector) v; + int positionCount = v.getPositionCount(); + if (vector.isConstant()) { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0), positionCount); + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + builder.appendBytesRef(evalValue(vector, p)); + } + return builder.build(); + } + } + + private static BytesRef evalValue(LongVector container, int index) { + long value = container.getLong(index); + return ToString.fromGeoPoint(value); + } + + @Override + public Block evalBlock(Block b) { + LongBlock block = (LongBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; 
p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + BytesRef value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(LongBlock container, int index) { + long value = container.getLong(index); + return ToString.fromGeoPoint(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToStringFromGeoPointEvaluator get(DriverContext context) { + return new ToStringFromGeoPointEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToStringFromGeoPointEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java index 12ec974142f62..5636fbbb3b23c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java @@ -35,6 +35,8 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static 
org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public record ColumnInfo(String name, String type) implements Writeable { @@ -160,6 +162,26 @@ protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); } }; + case "geo_point" -> new PositionToXContent(block) { + @Override + protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) + throws IOException { + // TODO Perhaps this is just a long for geo_point? And for more advanced types we need a new block type + long encoded = ((LongBlock) block).getLong(valueIndex); + String wkt = GEO.pointAsString(GEO.longAsPoint(encoded)); + return builder.value(wkt); + } + }; + case "cartesian_point" -> new PositionToXContent(block) { + @Override + protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) + throws IOException { + // TODO Perhaps this is just a long for cartesian_point? 
And for more advanced types we need a new block type + long encoded = ((LongBlock) block).getLong(valueIndex); + String wkt = CARTESIAN.pointAsString(CARTESIAN.longAsPoint(encoded)); + return builder.value(wkt); + } + }; case "boolean" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 3d91eafc8e033..fea9dd6c526c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -58,6 +58,8 @@ import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; public class EsqlQueryResponse extends ActionResponse implements ChunkedToXContent, Releasable { @@ -261,6 +263,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); + case "geo_point" -> GEO.longAsPoint(((LongBlock) block).getLong(offset)); + case "cartesian_point" -> CARTESIAN.longAsPoint(((LongBlock) block).getLong(offset)); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -318,6 +322,14 
@@ private static Page valuesToPage(List dataTypes, List> valu throw new UncheckedIOException(e); } } + case "geo_point" -> { + long longVal = GEO.pointAsLong(GEO.stringAsPoint(value.toString())); + ((LongBlock.Builder) builder).appendLong(longVal); + } + case "cartesian_point" -> { + long longVal = CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(value.toString())); + ((LongBlock.Builder) builder).appendLong(longVal); + } default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d8e113862ae71..479690912c904 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -305,6 +305,8 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { allowed.add(DataTypes.IP); allowed.add(DataTypes.DATETIME); allowed.add(DataTypes.VERSION); + allowed.add(EsqlDataTypes.GEO_POINT); + allowed.add(EsqlDataTypes.CARTESIAN_POINT); if (bc instanceof Equals || bc instanceof NotEquals) { allowed.add(DataTypes.BOOLEAN); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index f609bb5491569..d5a3e1cc6244c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.planner.Layout; import 
org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.type.DataType; @@ -136,6 +137,13 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) if (leftType == DataTypes.DATETIME) { return longs.apply(leftEval, rightEval); } + if (leftType == EsqlDataTypes.GEO_POINT) { + return longs.apply(leftEval, rightEval); + } + // TODO: Perhaps neither geo_point nor cartesian_point should support comparisons? + if (leftType == EsqlDataTypes.CARTESIAN_POINT) { + return longs.apply(leftEval, rightEval); + } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 2199d4bddaf77..b0cdad5095bbe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -20,9 +20,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -172,9 +174,11 @@ private FunctionDefinition[][] functions() { // conversion functions new FunctionDefinition[] { def(ToBoolean.class, ToBoolean::new, "to_boolean", "to_bool"), + def(ToCartesianPoint.class, ToCartesianPoint::new, "to_cartesianpoint"), def(ToDatetime.class, ToDatetime::new, "to_datetime", "to_dt"), def(ToDegrees.class, ToDegrees::new, "to_degrees"), def(ToDouble.class, ToDouble::new, "to_double", "to_dbl"), + def(ToGeoPoint.class, ToGeoPoint::new, "to_geopoint"), def(ToIP.class, ToIP::new, "to_ip"), def(ToInteger.class, ToInteger::new, "to_integer", "to_int"), def(ToLong.class, ToLong::new, "to_long"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java new file mode 100644 index 0000000000000..bb384ae846f26 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianPoint extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval), + Map.entry(LONG, (fieldEval, source) -> fieldEval), + Map.entry(UNSIGNED_LONG, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToCartesianPointFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToCartesianPointFromStringEvaluator.Factory::new) + ); + + public ToCartesianPoint(Source source, Expression field) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return CARTESIAN_POINT; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToCartesianPoint(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToCartesianPoint::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static long fromKeyword(BytesRef in) { + return 
CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(in.utf8ToString())); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java new file mode 100644 index 0000000000000..75ef5c324541b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoPoint extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(GEO_POINT, (fieldEval, source) -> fieldEval), + Map.entry(LONG, (fieldEval, source) -> fieldEval), + Map.entry(UNSIGNED_LONG, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToGeoPointFromStringEvaluator.Factory::new), + Map.entry(TEXT, 
ToGeoPointFromStringEvaluator.Factory::new) + ); + + public ToGeoPoint(Source source, Expression field) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return GEO_POINT; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToGeoPoint(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToGeoPoint::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static long fromKeyword(BytesRef in) { + return GEO.pointAsLong(GEO.stringAsPoint(in.utf8ToString())); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 8907ba930024e..b66ad4f359607 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -19,6 +19,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToLong; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; @@ -35,6 +37,8 @@ public class ToLong extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(LONG, (fieldEval, source) -> fieldEval), Map.entry(DATETIME, (fieldEval, source) -> fieldEval), + Map.entry(GEO_POINT, (fieldEval, source) -> fieldEval), + Map.entry(CARTESIAN_POINT, (fieldEval, 
source) -> fieldEval), Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 98118162e742d..a37b2becc8595 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -21,6 +21,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -33,6 +35,8 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.VERSION; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class ToString extends AbstractConvertFunction implements EvaluatorMapper { @@ -46,14 +50,28 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(INTEGER, ToStringFromIntEvaluator.Factory::new), Map.entry(TEXT, (fieldEval, source) -> fieldEval), Map.entry(VERSION, ToStringFromVersionEvaluator.Factory::new), - Map.entry(UNSIGNED_LONG, ToStringFromUnsignedLongEvaluator.Factory::new) + Map.entry(UNSIGNED_LONG, 
ToStringFromUnsignedLongEvaluator.Factory::new), + Map.entry(GEO_POINT, ToStringFromGeoPointEvaluator.Factory::new), + Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) ); public ToString( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { + "unsigned_long", + "date", + "boolean", + "double", + "ip", + "text", + "integer", + "keyword", + "version", + "long", + "geo_point", + "cartesian_point" } ) Expression v ) { super(source, v); @@ -118,4 +136,14 @@ static BytesRef fromVersion(BytesRef version) { static BytesRef fromUnsignedLong(long lng) { return new BytesRef(unsignedLongAsNumber(lng).toString()); } + + @ConvertEvaluator(extraName = "FromGeoPoint") + static BytesRef fromGeoPoint(long point) { + return new BytesRef(GEO.pointAsString(GEO.longAsPoint(point))); + } + + @ConvertEvaluator(extraName = "FromCartesianPoint") + static BytesRef fromCartesianPoint(long point) { + return new BytesRef(CARTESIAN.pointAsString(CARTESIAN.longAsPoint(point))); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 6d1446f4cccf4..bf05aeee4d228 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -38,7 +38,19 @@ public MvCount( Source source, @Param( name = "v", - type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + type = { + "unsigned_long", + "date", + "boolean", + "double", + "ip", + "text", + "integer", + "keyword", + "version", + "long", + "geo_point", + "cartesian_point" } ) Expression v ) { 
super(source, v); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index cee1d533b4332..e404e4d9151f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -15,13 +15,14 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** @@ -41,12 +42,12 @@ public MvMax( @Override protected TypeResolution resolveFieldType() { - return isType(field(), EsqlDataTypes::isRepresentable, sourceText(), null, "representable"); + return isType(field(), t -> isSpatial(t) == false && isRepresentable(t), sourceText(), null, "representableNonSpatial"); } @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMaxBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMaxBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMaxDoubleEvaluator.Factory(fieldEval); diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 18b452f9c7040..de78e52a19eb6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -15,13 +15,14 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isRepresentable; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** @@ -41,12 +42,12 @@ public MvMin( @Override protected TypeResolution resolveFieldType() { - return isType(field(), EsqlDataTypes::isRepresentable, sourceText(), null, "representable"); + return isType(field(), t -> isSpatial(t) == false && isRepresentable(t), sourceText(), null, "representableNonSpatial"); } @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMinBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMinBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMinDoubleEvaluator.Factory(fieldEval); diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java index ac8f9560074f5..b6bb7b8d74429 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormat.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.xcontent.MediaType; @@ -290,10 +291,18 @@ public Iterator> format(RestRequest request hasHeader(request) && esqlResponse.columns() != null ? Iterators.single(writer -> row(writer, esqlResponse.columns().iterator(), ColumnInfo::name, delimiter)) : Collections.emptyIterator(), - Iterators.map(esqlResponse.values(), row -> writer -> row(writer, row, f -> Objects.toString(f, StringUtils.EMPTY), delimiter)) + Iterators.map(esqlResponse.values(), row -> writer -> row(writer, row, TextFormat::formatEsqlResultObject, delimiter)) ); } + private static String formatEsqlResultObject(Object obj) { + // TODO: It would be nicer to override GeoPoint.toString() but that has consequences + if (obj instanceof SpatialPoint point) { + return String.format(Locale.ROOT, "POINT (%.7f %.7f)", point.getX(), point.getY()); + } + return Objects.toString(obj, StringUtils.EMPTY); + } + boolean hasHeader(RestRequest request) { return true; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java index 0535e4adfe346..48196d2bffbde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/formatter/TextFormatter.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.formatter; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -15,6 +16,7 @@ import java.io.Writer; import java.util.Collections; import java.util.Iterator; +import java.util.Locale; import java.util.Objects; import java.util.function.Function; @@ -29,7 +31,7 @@ public class TextFormatter { private final EsqlQueryResponse response; private final int[] width; - private final Function FORMATTER = Objects::toString; + private final Function FORMATTER = TextFormatter::formatEsqlResultObject; /** * Create a new {@linkplain TextFormatter} for formatting responses. @@ -128,4 +130,12 @@ private static void writePadding(int padding, Writer writer) throws IOException writer.append(PADDING_64, 0, padding); } } + + private static String formatEsqlResultObject(Object obj) { + // TODO: It would be nicer to override GeoPoint.toString() but that has consequences + if (obj instanceof SpatialPoint point) { + return String.format(Locale.ROOT, "POINT (%.7f %.7f)", point.getX(), point.getY()); + } + return Objects.toString(obj); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 20ec1ac410f64..a0e9c620d0fce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -37,9 +37,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -321,9 +323,11 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, Tan.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, Tanh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToBoolean.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToCartesianPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDatetime.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDegrees.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDouble.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToGeoPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToIP.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToInteger.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToLong.class, 
PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -1155,9 +1159,11 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(Tan.class), Tan::new), entry(name(Tanh.class), Tanh::new), entry(name(ToBoolean.class), ToBoolean::new), + entry(name(ToCartesianPoint.class), ToCartesianPoint::new), entry(name(ToDatetime.class), ToDatetime::new), entry(name(ToDegrees.class), ToDegrees::new), entry(name(ToDouble.class), ToDouble::new), + entry(name(ToGeoPoint.class), ToGeoPoint::new), entry(name(ToIP.class), ToIP::new), entry(name(ToInteger.class), ToInteger::new), entry(name(ToLong.class), ToLong::new), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 74cc4ab999808..0c4e10e91cb29 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -78,6 +78,7 @@ import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; @@ -303,9 +304,27 @@ public static ElementType toElementType(DataType dataType) { if (dataType == EsQueryExec.DOC_DATA_TYPE) { return ElementType.DOC; } + if (dataType == EsqlDataTypes.GEO_POINT) { + return ElementType.LONG; + } + if (dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.LONG; + } throw EsqlIllegalArgumentException.illegalDataType(dataType); } + /** + * Map QL's {@link DataType} to the compute engine's {@link 
ElementType}, for sortable types only. + * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG + * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). + */ + public static ElementType toSortableElementType(DataType dataType) { + if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.UNKNOWN; + } + return toElementType(dataType); + } + private PhysicalOperation planOutput(OutputExec outputExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(outputExec.child(), context); var output = outputExec.output(); @@ -409,7 +428,8 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case "text", "keyword" -> TopNEncoder.UTF8; case "version" -> TopNEncoder.VERSION; case "boolean", "null", "byte", "short", "integer", "long", "double", "float", "half_float", "datetime", "date_period", - "time_duration", "object", "nested", "scaled_float", "unsigned_long", "_doc" -> TopNEncoder.DEFAULT_SORTABLE; + "time_duration", "object", "nested", "scaled_float", "unsigned_long", "_doc", "geo_point", "cartesian_point" -> + TopNEncoder.DEFAULT_SORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case "unsupported" -> TopNEncoder.UNSUPPORTED; default -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index 61a739c786dac..03e2d40c8cb48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -44,6 +44,8 @@ public final class EsqlDataTypes { public static 
final DataType DATE_PERIOD = new DataType("DATE_PERIOD", null, 3 * Integer.BYTES, false, false, false); public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); + public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false); + public static final DataType CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, false); private static final Collection TYPES = Stream.of( BOOLEAN, @@ -67,7 +69,9 @@ public final class EsqlDataTypes { SCALED_FLOAT, SOURCE, VERSION, - UNSIGNED_LONG + UNSIGNED_LONG, + GEO_POINT, + CARTESIAN_POINT ).sorted(Comparator.comparing(DataType::typeName)).toList(); private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); @@ -76,6 +80,8 @@ public final class EsqlDataTypes { static { Map map = TYPES.stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); + // ES calls this 'point', but ESQL calls it 'cartesian_point' + map.put("point", CARTESIAN_POINT); ES_TO_TYPE = Collections.unmodifiableMap(map); } @@ -147,6 +153,10 @@ public static boolean isTemporalAmount(DataType t) { return t == DATE_PERIOD || t == TIME_DURATION; } + public static boolean isSpatial(DataType t) { + return t == GEO_POINT || t == CARTESIAN_POINT; + } + /** * Supported types that can be contained in a block. 
*/ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 3316f76c44680..5b2aba2e9e1f3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -47,6 +47,8 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase { @@ -115,6 +117,8 @@ private Page randomPage(List columns) { new BytesRef(UnsupportedValueSource.UNSUPPORTED_OUTPUT) ); case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); + case "geo_point" -> ((LongBlock.Builder) builder).appendLong(GEO.pointAsLong(randomGeoPoint())); + case "cartesian_point" -> ((LongBlock.Builder) builder).appendLong(CARTESIAN.pointAsLong(randomCartesianPoint())); case "null" -> builder.appendNull(); case "_source" -> { try { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 8990433a5155d..ba63afd8f1e4b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1197,78 +1197,78 @@ public void testEmptyEsRelationOnCountStar() throws IOException { } public void testUnsupportedFieldsInStats() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use 
field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | stats max(point) + | stats max(shape) """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by point + | stats max(int) by shape """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by bool, point + | stats max(int) by bool, shape """, errorMsg); } public void testUnsupportedFieldsInEval() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | eval x = point + | eval x = shape """, errorMsg); verifyUnsupported(""" from test - | eval foo = 1, x = point + | eval foo = 1, x = shape """, errorMsg); verifyUnsupported(""" from test - | eval x = 1 + point + | eval x = 1 + shape """, errorMsg); } public void testUnsupportedFieldsInWhere() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | where point == "[1.0, 1.0]" + | where shape == "[1.0, 1.0]" """, errorMsg); verifyUnsupported(""" from test - | where int > 2 and point == "[1.0, 1.0]" + | where int > 2 and shape == "[1.0, 1.0]" """, errorMsg); } public void testUnsupportedFieldsInSort() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | sort point + | sort shape """, errorMsg); verifyUnsupported(""" from test - | sort int, point + | sort int, shape """, errorMsg); } public void testUnsupportedFieldsInDissect() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | dissect point \"%{foo}\" + | dissect shape \"%{foo}\" """, errorMsg); } public void 
testUnsupportedFieldsInGrok() { - var errorMsg = "Cannot use field [point] with unsupported type [geo_point]"; + var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; verifyUnsupported(""" from test - | grok point \"%{WORD:foo}\" + | grok shape \"%{WORD:foo}\" """, errorMsg); } @@ -1292,7 +1292,8 @@ public void testRegexOnInt() { public void testUnsupportedTypesWithToString() { // DATE_PERIOD and TIME_DURATION types have been added, but not really patched through the engine; i.e. supported. - final String supportedTypes = "boolean, datetime, double, integer, ip, keyword, long, text, unsigned_long or version"; + final String supportedTypes = "boolean, cartesian_point, datetime, double, geo_point, integer, ip, keyword, long, text, " + + "unsigned_long or version"; verifyUnsupported( "row period = 1 year | eval to_string(period)", "line 1:28: argument of [to_string(period)] must be [" + supportedTypes + "], found value [period] type [date_period]" @@ -1301,7 +1302,7 @@ public void testUnsupportedTypesWithToString() { "row duration = 1 hour | eval to_string(duration)", "line 1:30: argument of [to_string(duration)] must be [" + supportedTypes + "], found value [duration] type [time_duration]" ); - verifyUnsupported("from test | eval to_string(point)", "line 1:28: Cannot use field [point] with unsupported type [geo_point]"); + verifyUnsupported("from test | eval to_string(shape)", "line 1:28: Cannot use field [shape] with unsupported type [geo_shape]"); } public void testNonExistingEnrichPolicy() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 64b93cc6eae5d..6e1b9487d1c9c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -86,6 +86,9 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -117,6 +120,8 @@ public static Literal randomLiteral(DataType type) { case "time_duration" -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days case "text" -> new BytesRef(randomAlphaOfLength(50)); case "version" -> randomVersion().toBytesRef(); + case "geo_point" -> GEO.pointAsLong(randomGeoPoint()); + case "cartesian_point" -> CARTESIAN.pointAsLong(randomCartesianPoint()); case "null" -> null; case "_source" -> { try { @@ -795,7 +800,9 @@ private static String typeErrorMessage(boolean includeOrdinal, List validTypes) { @@ -813,10 +820,22 @@ private static String expectedType(Set validTypes) { return named; } - private static Stream representable() { + protected static Stream representable() { return EsqlDataTypes.types().stream().filter(EsqlDataTypes::isRepresentable); } + protected static DataType[] representableTypes() { + return representable().toArray(DataType[]::new); + } + + protected static Stream representableNonSpatial() { + return representable().filter(t -> isSpatial(t) == false); + } + + protected static DataType[] representableNonSpatialTypes() { + return representableNonSpatial().toArray(DataType[]::new); + } + @AfterClass public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 8603cea9e873c..c1e9494541636 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -35,6 +35,10 @@ import java.util.function.UnaryOperator; import java.util.stream.Collectors; +import static org.elasticsearch.test.ESTestCase.randomCartesianPoint; +import static org.elasticsearch.test.ESTestCase.randomGeoPoint; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; /** @@ -339,6 +343,48 @@ public static void forUnaryDatetime( ); } + /** + * Generate positive test cases for a unary function operating on an {@link EsqlDataTypes#GEO_POINT}. + */ + public static void forUnaryGeoPoint( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unaryNumeric( + suppliers, + expectedEvaluatorToString, + EsqlDataTypes.GEO_POINT, + geoPointCases(), + expectedType, + n -> expectedValue.apply(n.longValue()), + warnings + ); + } + + /** + * Generate positive test cases for a unary function operating on an {@link EsqlDataTypes#CARTESIAN_POINT}. 
+ */ + public static void forUnaryCartesianPoint( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unaryNumeric( + suppliers, + expectedEvaluatorToString, + EsqlDataTypes.CARTESIAN_POINT, + cartesianPointCases(), + expectedType, + n -> expectedValue.apply(n.longValue()), + warnings + ); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#IP}. */ @@ -633,6 +679,16 @@ private static List dateCases() { ); } + private static List geoPointCases() { + return List.of(new TypedDataSupplier("", () -> GEO.pointAsLong(randomGeoPoint()), EsqlDataTypes.GEO_POINT)); + } + + private static List cartesianPointCases() { + return List.of( + new TypedDataSupplier("", () -> CARTESIAN.pointAsLong(randomCartesianPoint()), EsqlDataTypes.CARTESIAN_POINT) + ); + } + private static List ipCases() { return List.of( new TypedDataSupplier( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java index 08783823fc00f..dcdfd49b8029c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractScalarFunctionTestCase.java @@ -102,10 +102,6 @@ protected final DataType[] representableNumerics() { return EsqlDataTypes.types().stream().filter(DataType::isNumeric).filter(EsqlDataTypes::isRepresentable).toArray(DataType[]::new); } - protected final DataType[] representable() { - return EsqlDataTypes.types().stream().filter(EsqlDataTypes::isRepresentable).toArray(DataType[]::new); - } - protected record ArgumentSpec(boolean optional, Set validTypes) {} public final void testResolveType() { @@ -187,9 
+183,12 @@ private String expectedTypeName(Set validTypes) { if (withoutNull.equals(negations)) { return "numeric, date_period or time_duration"; } - if (validTypes.equals(Set.copyOf(Arrays.asList(representable())))) { + if (validTypes.equals(Set.copyOf(Arrays.asList(representableTypes())))) { return "representable"; } + if (validTypes.equals(Set.copyOf(Arrays.asList(representableNonSpatialTypes())))) { + return "representableNonSpatial"; + } throw new IllegalArgumentException("can't guess expected type for " + validTypes); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 4712a1afa9399..088b9b438898b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -24,6 +24,9 @@ import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + public class ToStringTests extends AbstractFunctionTestCase { public ToStringTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -84,6 +87,20 @@ public static Iterable parameters() { i -> new BytesRef(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i.toEpochMilli())), List.of() ); + TestCaseSupplier.forUnaryGeoPoint( + suppliers, + "ToStringFromGeoPointEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + i -> new BytesRef(GEO.pointAsString(GEO.longAsPoint(i))), + List.of() + ); + TestCaseSupplier.forUnaryCartesianPoint( + suppliers, + "ToStringFromCartesianPointEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + i -> new 
BytesRef(CARTESIAN.pointAsString(CARTESIAN.longAsPoint(i))), + List.of() + ); TestCaseSupplier.forUnaryIp( suppliers, "ToStringFromIPEvaluator[field=" + read + "]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index a87cc379e8c3f..fe04a659651de 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -8,15 +8,18 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.compute.data.Block; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.NumericUtils; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; import org.hamcrest.Matcher; import java.math.BigInteger; @@ -26,11 +29,15 @@ import java.util.List; import java.util.function.BiFunction; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.DoubleStream; import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; +import static 
org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + public abstract class AbstractMultivalueFunctionTestCase extends AbstractScalarFunctionTestCase { /** * Build many test cases with {@code boolean} values. @@ -381,6 +388,104 @@ protected static void dateTimes( } } + /** + * Build many test cases with {@code geo_point} values. + */ + protected static void geoPoints( + List cases, + String name, + String evaluatorName, + BiFunction> matcher + ) { + geoPoints(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, matcher); + } + + /** + * Build many test cases with {@code geo_point} values that are converted to another type. + * For example, mv_count() can consume points and produce an integer count. + */ + protected static void geoPoints( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction> matcher + ) { + points(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, ESTestCase::randomGeoPoint, matcher); + } + + /** + * Build many test cases with {@code cartesian_point} values. + */ + protected static void cartesianPoints( + List cases, + String name, + String evaluatorName, + BiFunction> matcher + ) { + cartesianPoints(cases, name, evaluatorName, EsqlDataTypes.CARTESIAN_POINT, matcher); + } + + /** + * Build many test cases with {@code cartesian_point} values that are converted to another type. + * For example, mv_count() can consume points and produce an integer count. + */ + protected static void cartesianPoints( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction> matcher + ) { + points( + cases, + name, + evaluatorName, + EsqlDataTypes.CARTESIAN_POINT, + expectedDataType, + CARTESIAN, + ESTestCase::randomCartesianPoint, + matcher + ); + } + + /** + * Build many test cases with either {@code geo_point} or {@code cartesian_point} values. 
+ */ + protected static void points( + List cases, + String name, + String evaluatorName, + DataType dataType, + DataType expectedDataType, + SpatialCoordinateTypes coordType, + Supplier randomPoint, + BiFunction> matcher + ) { + cases.add(new TestCaseSupplier(name + "(" + dataType.typeName() + ")", List.of(dataType), () -> { + SpatialPoint point = randomPoint.get(); + long data = coordType.pointAsLong(point); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(List.of(data), dataType, "field")), + evaluatorName + "[field=Attribute[channel=0]]", + expectedDataType, + matcher.apply(1, LongStream.of(data)) + ); + })); + for (Block.MvOrdering ordering : Block.MvOrdering.values()) { + cases.add(new TestCaseSupplier(name + "(<" + dataType.typeName() + "s>) " + ordering, List.of(dataType), () -> { + List mvData = randomList(1, 100, () -> coordType.pointAsLong(randomPoint.get())); + putInOrder(mvData, ordering); + return new TestCaseSupplier.TestCase( + List.of(new TestCaseSupplier.TypedData(mvData, dataType, "field")), + evaluatorName + "[field=Attribute[channel=0]]", + expectedDataType, + matcher.apply(mvData.size(), mvData.stream().mapToLong(Long::longValue)) + ); + })); + } + } + /** * Build many test cases with unsigned {@code long} values. 
*/ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java index deffc42244c10..1abbd62faa0bd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java @@ -37,6 +37,8 @@ public static Iterable parameters() { longs(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); unsignedLongs(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); dateTimes(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + geoPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + cartesianPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } @@ -47,7 +49,7 @@ protected Expression build(Source source, Expression field) { @Override protected DataType[] supportedTypes() { - return representable(); + return representableTypes(); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index 375a7a769ccfd..2299d1a47d3a7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -54,7 +54,7 @@ protected Expression build(Source source, Expression field) { @Override protected DataType[] supportedTypes() { - return representable(); + return representableTypes(); } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java index 8f7292adb86a4..25764a9029bfd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java @@ -54,6 +54,6 @@ protected Expression build(Source source, Expression field) { @Override protected DataType[] supportedTypes() { - return representable(); + return representableNonSpatialTypes(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java index 750d5d4cb89ce..5556755cfe125 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java @@ -54,6 +54,6 @@ protected Expression build(Source source, Expression field) { @Override protected DataType[] supportedTypes() { - return representable(); + return representableNonSpatialTypes(); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java index 1edb00e474e3c..2348c32f58687 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java @@ -92,7 +92,8 @@ protected final void validateType(BinaryOperator op, DataType lhsTyp equalTo( String.format( Locale.ROOT, - "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime or version], found value [] type [%s]", + "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime, version, geo_point or " + + "cartesian_point], found value [] type [%s]", lhsType.typeName(), rhsType.typeName(), lhsType.typeName() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index c6759b66ee69d..95d8babcc5802 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntArrayVector; +import org.elasticsearch.compute.data.LongArrayVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; @@ -34,6 +35,7 @@ import static org.elasticsearch.xpack.esql.formatter.TextFormat.CSV; import static org.elasticsearch.xpack.esql.formatter.TextFormat.PLAIN_TEXT; import static org.elasticsearch.xpack.esql.formatter.TextFormat.TSV; +import static 
org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; public class TextFormatTests extends ESTestCase { @@ -116,17 +118,17 @@ public void testTsvFormatWithEmptyData() { public void testCsvFormatWithRegularData() { String text = format(CSV, req(), regularData()); assertEquals(""" - string,number\r - Along The River Bank,708\r - Mind Train,280\r + string,number,location\r + Along The River Bank,708,POINT (12.0000000 56.0000000)\r + Mind Train,280,POINT (-97.0000000 26.0000000)\r """, text); } public void testCsvFormatNoHeaderWithRegularData() { String text = format(CSV, reqWithParam("header", "absent"), regularData()); assertEquals(""" - Along The River Bank,708\r - Mind Train,280\r + Along The River Bank,708,POINT (12.0000000 56.0000000)\r + Mind Train,280,POINT (-97.0000000 26.0000000)\r """, text); } @@ -134,12 +136,24 @@ public void testCsvFormatWithCustomDelimiterRegularData() { Set forbidden = Set.of('"', '\r', '\n', '\t'); Character delim = randomValueOtherThanMany(forbidden::contains, () -> randomAlphaOfLength(1).charAt(0)); String text = format(CSV, reqWithParam("delimiter", String.valueOf(delim)), regularData()); - List terms = Arrays.asList("string", "number", "Along The River Bank", "708", "Mind Train", "280"); + List terms = Arrays.asList( + "string", + "number", + "location", + "Along The River Bank", + "708", + "POINT (12.0000000 56.0000000)", + "Mind Train", + "280", + "POINT (-97.0000000 26.0000000)" + ); List expectedTerms = terms.stream() .map(x -> x.contains(String.valueOf(delim)) ? 
'"' + x + '"' : x) .collect(Collectors.toList()); StringBuffer sb = new StringBuffer(); do { + sb.append(expectedTerms.remove(0)); + sb.append(delim); sb.append(expectedTerms.remove(0)); sb.append(delim); sb.append(expectedTerms.remove(0)); @@ -151,9 +165,9 @@ public void testCsvFormatWithCustomDelimiterRegularData() { public void testTsvFormatWithRegularData() { String text = format(TSV, req(), regularData()); assertEquals(""" - string\tnumber - Along The River Bank\t708 - Mind Train\t280 + string\tnumber\tlocation + Along The River Bank\t708\tPOINT (12.0000000 56.0000000) + Mind Train\t280\tPOINT (-97.0000000 26.0000000) """, text); } @@ -227,7 +241,11 @@ private static EsqlQueryResponse emptyData() { private static EsqlQueryResponse regularData() { // headers - List headers = asList(new ColumnInfo("string", "keyword"), new ColumnInfo("number", "integer")); + List headers = asList( + new ColumnInfo("string", "keyword"), + new ColumnInfo("number", "integer"), + new ColumnInfo("location", "geo_point") + ); // values List values = List.of( @@ -236,7 +254,8 @@ private static EsqlQueryResponse regularData() { .appendBytesRef(new BytesRef("Along The River Bank")) .appendBytesRef(new BytesRef("Mind Train")) .build(), - new IntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock() + new IntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), + new LongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock() ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index fa0b01dc3366a..558a92de70351 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -23,6 +23,7 @@ import static 
org.elasticsearch.rest.RestResponseUtils.getTextBodyContent; import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.arrayWithSize; public class TextFormatterTests extends ESTestCase { @@ -34,6 +35,7 @@ public class TextFormatterTests extends ESTestCase { new ColumnInfo("superduperwidename!!!", "double"), new ColumnInfo("baz", "keyword"), new ColumnInfo("date", "date"), + new ColumnInfo("location", "geo_point"), new ColumnInfo("null_field2", "keyword") ); EsqlQueryResponse esqlResponse = new EsqlQueryResponse( @@ -55,6 +57,7 @@ public class TextFormatterTests extends ESTestCase { UTC_DATE_TIME_FORMATTER.parseMillis("2000-03-15T21:34:37.443Z") }, 2 ).asBlock(), + new LongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), Block.constantNullBlock(2) ) ), @@ -75,22 +78,22 @@ public void testFormatWithHeader() { assertThat(result, arrayWithSize(4)); assertEquals( " foo | bar |15charwidename!| null_field1 |superduperwidename!!!| baz |" - + " date | null_field2 ", + + " date | location | null_field2 ", result[0] ); assertEquals( "---------------+---------------+---------------+---------------+---------------------+---------------+" - + "------------------------+---------------", + + "------------------------+------------------------------+---------------", result[1] ); assertEquals( "15charwidedata!|1 |6.888 |null |12.0 |rabbit |" - + "1953-09-02T00:00:00.000Z|null ", + + "1953-09-02T00:00:00.000Z|POINT (12.0000000 56.0000000) |null ", result[2] ); assertEquals( "dog |2 |123124.888 |null |9912.0 |goat |" - + "2000-03-15T21:34:37.443Z|null ", + + "2000-03-15T21:34:37.443Z|POINT (-97.0000000 26.0000000)|null ", result[3] ); } @@ -116,6 +119,7 @@ public void testFormatWithoutHeader() { UTC_DATE_TIME_FORMATTER.parseMillis("2231-12-31T23:59:59.999Z") }, 2 ).asBlock(), + new LongArrayVector(new long[] { 
GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), Block.constantNullBlock(2) ) ), @@ -126,12 +130,12 @@ public void testFormatWithoutHeader() { assertThat(result, arrayWithSize(2)); assertEquals( "doggie |4 |1.0 |null |77.0 |wombat |" - + "1955-01-21T01:02:03.342Z|null ", + + "1955-01-21T01:02:03.342Z|POINT (12.0000000 56.0000000) |null ", result[0] ); assertEquals( "dog |2 |123124.888 |null |9912.0 |goat |" - + "2231-12-31T23:59:59.999Z|null ", + + "2231-12-31T23:59:59.999Z|POINT (-97.0000000 26.0000000)|null ", result[1] ); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java new file mode 100644 index 0000000000000..57e472cd5bb17 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.util; + +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.geo.XYEncodingUtils; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.GeometryValidator; +import org.elasticsearch.geometry.utils.WellKnownText; + +import java.util.Locale; + +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; + +public enum SpatialCoordinateTypes { + GEO { + public SpatialPoint longAsPoint(long encoded) { + return new GeoPoint(GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)), GeoEncodingUtils.decodeLongitude((int) encoded)); + } + + public long pointAsLong(double x, double y) { + int latitudeEncoded = encodeLatitude(y); + int longitudeEncoded = encodeLongitude(x); + return (((long) latitudeEncoded) << 32) | (longitudeEncoded & 0xFFFFFFFFL); + } + + public SpatialPoint pointAsPoint(Point point) { + return new GeoPoint(point.getY(), point.getX()); + } + }, + CARTESIAN { + public SpatialPoint longAsPoint(long encoded) { + final double x = XYEncodingUtils.decode((int) (encoded >>> 32)); + final double y = XYEncodingUtils.decode((int) (encoded & 0xFFFFFFFF)); + return makePoint(x, y); + } + + public long pointAsLong(double x, double y) { + final long xi = XYEncodingUtils.encode((float) x); + final long yi = XYEncodingUtils.encode((float) y); + return (yi & 0xFFFFFFFFL) | xi << 32; + } + + public SpatialPoint pointAsPoint(Point point) { + return makePoint(point.getX(), point.getY()); + } + + private SpatialPoint makePoint(double x, double y) { + return new SpatialPoint() { + @Override + public double getX() { + return x; + } + + @Override + public double getY() { + return y; + } + + @Override + public int hashCode() { + return 31 * Double.hashCode(x) + 
Double.hashCode(y); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj instanceof SpatialPoint other) { + return x == other.getX() && y == other.getY(); + } + return false; + } + + @Override + public String toString() { + return String.format(Locale.ROOT, "POINT (%f %f)", x, y); + } + }; + } + }; + + public abstract SpatialPoint longAsPoint(long encoded); + + public long pointAsLong(SpatialPoint point) { + return pointAsLong(point.getX(), point.getY()); + } + + public abstract long pointAsLong(double x, double y); + + public String pointAsString(SpatialPoint point) { + return WellKnownText.toWKT(new Point(point.getX(), point.getY())); + } + + public SpatialPoint stringAsPoint(String string) { + try { + Geometry geometry = WellKnownText.fromWKT(GeometryValidator.NOOP, false, string); + if (geometry instanceof Point point) { + return pointAsPoint(point); + } else { + throw new IllegalArgumentException("Unsupported geometry type " + geometry.type()); + } + } catch (Exception e) { + throw new RuntimeException("Failed to parse WKT: " + e.getMessage(), e); + } + } + + public abstract SpatialPoint pointAsPoint(Point point); +} diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java new file mode 100644 index 0000000000000..d4db20faf0050 --- /dev/null +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.util; + +import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.test.ESTestCase; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.closeTo; + +public class SpatialCoordinateTypesTests extends ESTestCase { + + private static final Map types = new LinkedHashMap<>(); + static { + types.put(SpatialCoordinateTypes.GEO, new TestTypeFunctions(ESTestCase::randomGeoPoint, v -> 1e-5)); + types.put(SpatialCoordinateTypes.CARTESIAN, new TestTypeFunctions(ESTestCase::randomCartesianPoint, v -> Math.abs(v / 1e5))); + } + + record TestTypeFunctions(Supplier randomPoint, Function error) {} + + public void testEncoding() { + for (var type : types.entrySet()) { + for (int i = 0; i < 10; i++) { + SpatialCoordinateTypes coordType = type.getKey(); + SpatialPoint original = type.getValue().randomPoint().get(); + var error = type.getValue().error; + SpatialPoint point = coordType.longAsPoint(coordType.pointAsLong(original)); + assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getX()))); + assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getY()))); + } + } + } + + public void testParsing() { + for (var type : types.entrySet()) { + for (int i = 0; i < 10; i++) { + SpatialCoordinateTypes coordType = type.getKey(); + SpatialPoint geoPoint = type.getValue().randomPoint.get(); + SpatialPoint point = coordType.stringAsPoint(coordType.pointAsString(geoPoint)); + assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(geoPoint.getY(), 1e-5)); + assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(geoPoint.getX(), 1e-5)); + } + } + } +} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index f5cc7280aa8bb..6804e5a857f57 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -21,6 +21,9 @@ import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.AbstractPointGeometryFieldMapper; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; @@ -117,6 +120,7 @@ public FieldMapper build(MapperBuilderContext context) { stored.get(), hasDocValues.get(), parser, + nullValue.get(), meta.get() ); return new PointFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo, parser, this); @@ -182,6 +186,7 @@ public FieldMapper.Builder getMergeBuilder() { public static class PointFieldType extends AbstractGeometryFieldType implements ShapeQueryable { private final ShapeQueryPointProcessor queryProcessor; + private final CartesianPoint nullValue; private PointFieldType( String name, @@ -189,15 +194,17 @@ private PointFieldType( boolean stored, boolean hasDocValues, CartesianPointParser parser, + CartesianPoint nullValue, Map meta ) { super(name, indexed, stored, hasDocValues, parser, meta); + this.nullValue = nullValue; this.queryProcessor = new ShapeQueryPointProcessor(); } // only used in test public PointFieldType(String name) { - this(name, true, false, true, null, Collections.emptyMap()); + this(name, true, false, true, null, null, Collections.emptyMap()); } @Override @@ -224,6 +231,17 @@ public Query 
shapeQuery(Geometry shape, String fieldName, ShapeRelation relation protected Function, List> getFormatter(String format) { return GeometryFormatterFactory.getFormatter(format, p -> new Point(p.getX(), p.getY())); } + + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return new BlockDocValuesReader.LongsBlockLoader(name()); + } + // TODO: Currently we use longs in the compute engine and render to WKT in ESQL + return new BlockSourceReader.LongsBlockLoader( + valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKT) + ); + } } /** CartesianPoint parser implementation */ diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml index 531f30d42ece0..30e56cd9cc748 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/30_types.yml @@ -865,3 +865,63 @@ synthetic _source text with parent keyword: - match: {columns.0.type: text} - length: {values: 1} - match: {values.0.0: jack of diamonds} + +--- +geo_point: + - do: + indices.create: + index: test + body: + mappings: + properties: + location: + type: geo_point + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "location": "POINT(1 -1)" } + + - do: + warnings: + - "No limit defined, adding default limit of [500]" + esql.query: + body: + query: 'from test' + - match: { columns.0.name: location } + - match: { columns.0.type: geo_point } + - length: { values: 1 } + - match: { values.0.0: "POINT (0.9999999403953552 -1.000000024214387)" } + +--- +cartesian_point: + - do: + indices.create: + index: test + body: + mappings: + properties: + location: + type: point + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "location": "POINT(4321 -1234)" } + + - do: + warnings: + - 
"No limit defined, adding default limit of [500]" + esql.query: + body: + query: 'from test' + - match: { columns.0.name: location } + - match: { columns.0.type: cartesian_point } + - length: { values: 1 } + - match: { values.0.0: "POINT (4321.0 -1234.0)" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index be5b43433983e..181cf52b66c7c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -42,6 +42,8 @@ setup: geo_point_alias: type: alias path: geo_point + geo_shape: + type: geo_shape ip_range: type: ip_range long_range: @@ -50,6 +52,8 @@ setup: type: match_only_text nested: type: nested + point: + type: point rank_feature: type: rank_feature rank_features: @@ -86,12 +90,14 @@ setup: "double_range": { "gte": 1.0, "lte": 2.0 }, "float_range": { "gte": 1.0, "lte": 2.0 }, "geo_point": [ 10.0, 12.0 ], + "geo_shape": "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)", "histogram": { "values": [ 0.1, 0.25, 0.35, 0.4, 0.45, 0.5 ], "counts": [ 8, 17, 8, 7, 6, 2 ] }, "integer_range": { "gte": 1, "lte": 2 }, "ip_range": "127.0.0.1/16", "long_range": { "gte": 1, "lte": 2 }, "match_only_text": "foo bar baz", "name": "Alice", + "point": "POINT (-97.15447 25.9961525)", "rank_feature": 10, "rank_features": { "foo": 10, "bar": 20 }, "search_as_you_type": "foo bar baz", @@ -103,6 +109,10 @@ setup: --- unsupported: + - skip: + version: " - 8.11.99" + reason: "Latest types supported in ESQL starting with 8.12.0" + - do: allowed_warnings_regex: - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" @@ -128,43 +138,47 @@ unsupported: - match: { columns.7.name: float_range } - match: { columns.7.type: unsupported } - match: { 
columns.8.name: geo_point } - - match: { columns.8.type: unsupported } + - match: { columns.8.type: geo_point } - match: { columns.9.name: geo_point_alias } - - match: { columns.9.type: unsupported } - - match: { columns.10.name: histogram } + - match: { columns.9.type: geo_point } + - match: { columns.10.name: geo_shape } - match: { columns.10.type: unsupported } - - match: { columns.11.name: integer_range } + - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - - match: { columns.12.name: ip_range } + - match: { columns.12.name: integer_range } - match: { columns.12.type: unsupported } - - match: { columns.13.name: long_range } + - match: { columns.13.name: ip_range } - match: { columns.13.type: unsupported } - - match: { columns.14.name: match_only_text } - - match: { columns.14.type: text } - - match: { columns.15.name: name } - - match: { columns.15.type: keyword } - - match: { columns.16.name: rank_feature } - - match: { columns.16.type: unsupported } - - match: { columns.17.name: rank_features } - - match: { columns.17.type: unsupported } - - match: { columns.18.name: search_as_you_type } + - match: { columns.14.name: long_range } + - match: { columns.14.type: unsupported } + - match: { columns.15.name: match_only_text } + - match: { columns.15.type: text } + - match: { columns.16.name: name } + - match: { columns.16.type: keyword } + - match: { columns.17.name: point } + - match: { columns.17.type: cartesian_point } + - match: { columns.18.name: rank_feature } - match: { columns.18.type: unsupported } - - match: { columns.19.name: search_as_you_type._2gram } + - match: { columns.19.name: rank_features } - match: { columns.19.type: unsupported } - - match: { columns.20.name: search_as_you_type._3gram } + - match: { columns.20.name: search_as_you_type } - match: { columns.20.type: unsupported } - - match: { columns.21.name: search_as_you_type._index_prefix } + - match: { columns.21.name: search_as_you_type._2gram } - match: { 
columns.21.type: unsupported } - - match: { columns.22.name: shape } + - match: { columns.22.name: search_as_you_type._3gram } - match: { columns.22.type: unsupported } - - match: { columns.23.name: some_doc.bar } - - match: { columns.23.type: long } - - match: { columns.24.name: some_doc.foo } - - match: { columns.24.type: keyword } - - match: { columns.25.name: text } - - match: { columns.25.type: text } - - match: { columns.26.name: token_count } - - match: { columns.26.type: integer } + - match: { columns.23.name: search_as_you_type._index_prefix } + - match: { columns.23.type: unsupported } + - match: { columns.24.name: shape } + - match: { columns.24.type: unsupported } + - match: { columns.25.name: some_doc.bar } + - match: { columns.25.type: long } + - match: { columns.26.name: some_doc.foo } + - match: { columns.26.type: keyword } + - match: { columns.27.name: text } + - match: { columns.27.type: text } + - match: { columns.28.name: token_count } + - match: { columns.28.type: integer } - length: { values: 1 } - match: { values.0.0: null } @@ -175,25 +189,27 @@ unsupported: - match: { values.0.5: null } - match: { values.0.6: null } - match: { values.0.7: null } - - match: { values.0.8: null } - - match: { values.0.9: null } + - match: { values.0.8: "POINT (9.999999990686774 11.999999997206032)" } + - match: { values.0.9: "POINT (9.999999990686774 11.999999997206032)" } - match: { values.0.10: null } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } - - match: { values.0.14: "foo bar baz" } - - match: { values.0.15: Alice } - - match: { values.0.16: null } - - match: { values.0.17: null } + - match: { values.0.14: null } + - match: { values.0.15: "foo bar baz" } + - match: { values.0.16: Alice } + - match: { values.0.17: "POINT (-97.15447235107422 25.996152877807617)" } - match: { values.0.18: null } - match: { values.0.19: null } - match: { values.0.20: null } - match: { values.0.21: null } - match: { values.0.22: 
null } - - match: { values.0.23: 12 } - - match: { values.0.24: xy } - - match: { values.0.25: "foo bar" } - - match: { values.0.26: 3 } + - match: { values.0.23: null } + - match: { values.0.24: null } + - match: { values.0.25: 12 } + - match: { values.0.26: xy } + - match: { values.0.27: "foo bar" } + - match: { values.0.28: 3 } # limit 0 @@ -218,43 +234,47 @@ unsupported: - match: { columns.7.name: float_range } - match: { columns.7.type: unsupported } - match: { columns.8.name: geo_point } - - match: { columns.8.type: unsupported } + - match: { columns.8.type: geo_point } - match: { columns.9.name: geo_point_alias } - - match: { columns.9.type: unsupported } - - match: { columns.10.name: histogram } + - match: { columns.9.type: geo_point } + - match: { columns.10.name: geo_shape } - match: { columns.10.type: unsupported } - - match: { columns.11.name: integer_range } + - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - - match: { columns.12.name: ip_range } + - match: { columns.12.name: integer_range } - match: { columns.12.type: unsupported } - - match: { columns.13.name: long_range } + - match: { columns.13.name: ip_range } - match: { columns.13.type: unsupported } - - match: { columns.14.name: match_only_text } - - match: { columns.14.type: text } - - match: { columns.15.name: name } - - match: { columns.15.type: keyword } - - match: { columns.16.name: rank_feature } - - match: { columns.16.type: unsupported } - - match: { columns.17.name: rank_features } - - match: { columns.17.type: unsupported } - - match: { columns.18.name: search_as_you_type } + - match: { columns.14.name: long_range } + - match: { columns.14.type: unsupported } + - match: { columns.15.name: match_only_text } + - match: { columns.15.type: text } + - match: { columns.16.name: name } + - match: { columns.16.type: keyword } + - match: { columns.17.name: point } + - match: { columns.17.type: cartesian_point } + - match: { columns.18.name: rank_feature } - 
match: { columns.18.type: unsupported } - - match: { columns.19.name: search_as_you_type._2gram } + - match: { columns.19.name: rank_features } - match: { columns.19.type: unsupported } - - match: { columns.20.name: search_as_you_type._3gram } + - match: { columns.20.name: search_as_you_type } - match: { columns.20.type: unsupported } - - match: { columns.21.name: search_as_you_type._index_prefix } + - match: { columns.21.name: search_as_you_type._2gram } - match: { columns.21.type: unsupported } - - match: { columns.22.name: shape } + - match: { columns.22.name: search_as_you_type._3gram } - match: { columns.22.type: unsupported } - - match: { columns.23.name: some_doc.bar } - - match: { columns.23.type: long } - - match: { columns.24.name: some_doc.foo } - - match: { columns.24.type: keyword } - - match: { columns.25.name: text } - - match: { columns.25.type: text } - - match: { columns.26.name: token_count } - - match: { columns.26.type: integer } + - match: { columns.23.name: search_as_you_type._index_prefix } + - match: { columns.23.type: unsupported } + - match: { columns.24.name: shape } + - match: { columns.24.type: unsupported } + - match: { columns.25.name: some_doc.bar } + - match: { columns.25.type: long } + - match: { columns.26.name: some_doc.foo } + - match: { columns.26.type: keyword } + - match: { columns.27.name: text } + - match: { columns.27.type: text } + - match: { columns.28.name: token_count } + - match: { columns.28.type: integer } - length: { values: 0 } @@ -269,6 +289,10 @@ unsupported: --- unsupported with sort: + - skip: + version: " - 8.11.99" + reason: "Latest types supported in ESQL starting with 8.12.0" + - do: allowed_warnings_regex: - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" @@ -294,43 +318,47 @@ unsupported with sort: - match: { columns.7.name: float_range } - match: { columns.7.type: unsupported } - match: { columns.8.name: geo_point } - - match: { columns.8.type: unsupported } + - 
match: { columns.8.type: geo_point } - match: { columns.9.name: geo_point_alias } - - match: { columns.9.type: unsupported } - - match: { columns.10.name: histogram } + - match: { columns.9.type: geo_point } + - match: { columns.10.name: geo_shape } - match: { columns.10.type: unsupported } - - match: { columns.11.name: integer_range } + - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - - match: { columns.12.name: ip_range } + - match: { columns.12.name: integer_range } - match: { columns.12.type: unsupported } - - match: { columns.13.name: long_range } + - match: { columns.13.name: ip_range } - match: { columns.13.type: unsupported } - - match: { columns.14.name: match_only_text } - - match: { columns.14.type: text } - - match: { columns.15.name: name } - - match: { columns.15.type: keyword } - - match: { columns.16.name: rank_feature } - - match: { columns.16.type: unsupported } - - match: { columns.17.name: rank_features } - - match: { columns.17.type: unsupported } - - match: { columns.18.name: search_as_you_type } + - match: { columns.14.name: long_range } + - match: { columns.14.type: unsupported } + - match: { columns.15.name: match_only_text } + - match: { columns.15.type: text } + - match: { columns.16.name: name } + - match: { columns.16.type: keyword } + - match: { columns.17.name: point } + - match: { columns.17.type: cartesian_point } + - match: { columns.18.name: rank_feature } - match: { columns.18.type: unsupported } - - match: { columns.19.name: search_as_you_type._2gram } + - match: { columns.19.name: rank_features } - match: { columns.19.type: unsupported } - - match: { columns.20.name: search_as_you_type._3gram } + - match: { columns.20.name: search_as_you_type } - match: { columns.20.type: unsupported } - - match: { columns.21.name: search_as_you_type._index_prefix } + - match: { columns.21.name: search_as_you_type._2gram } - match: { columns.21.type: unsupported } - - match: { columns.22.name: shape } + - 
match: { columns.22.name: search_as_you_type._3gram } - match: { columns.22.type: unsupported } - - match: { columns.23.name: some_doc.bar } - - match: { columns.23.type: long } - - match: { columns.24.name: some_doc.foo } - - match: { columns.24.type: keyword } - - match: { columns.25.name: text } - - match: { columns.25.type: text } - - match: { columns.26.name: token_count } - - match: { columns.26.type: integer } + - match: { columns.23.name: search_as_you_type._index_prefix } + - match: { columns.23.type: unsupported } + - match: { columns.24.name: shape } + - match: { columns.24.type: unsupported } + - match: { columns.25.name: some_doc.bar } + - match: { columns.25.type: long } + - match: { columns.26.name: some_doc.foo } + - match: { columns.26.type: keyword } + - match: { columns.27.name: text } + - match: { columns.27.type: text } + - match: { columns.28.name: token_count } + - match: { columns.28.type: integer } - length: { values: 1 } - match: { values.0.0: null } @@ -341,22 +369,56 @@ unsupported with sort: - match: { values.0.5: null } - match: { values.0.6: null } - match: { values.0.7: null } - - match: { values.0.8: null } - - match: { values.0.9: null } + - match: { values.0.8: "POINT (9.999999990686774 11.999999997206032)" } + - match: { values.0.9: "POINT (9.999999990686774 11.999999997206032)" } - match: { values.0.10: null } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } - - match: { values.0.14: "foo bar baz" } - - match: { values.0.15: Alice } - - match: { values.0.16: null } - - match: { values.0.17: null } + - match: { values.0.14: null } + - match: { values.0.15: "foo bar baz" } + - match: { values.0.16: Alice } + - match: { values.0.17: "POINT (-97.15447235107422 25.996152877807617)" } - match: { values.0.18: null } - match: { values.0.19: null } - match: { values.0.20: null } - match: { values.0.21: null } - match: { values.0.22: null } - - match: { values.0.23: 12 } - - match: { 
values.0.24: xy } - - match: { values.0.25: "foo bar" } - - match: { values.0.26: 3 } + - match: { values.0.23: null } + - match: { values.0.24: null } + - match: { values.0.25: 12 } + - match: { values.0.26: xy } + - match: { values.0.27: "foo bar" } + - match: { values.0.28: 3 } + +--- +spatial types unsupported in 8.11: + - skip: + version: " - 8.10.99, 8.12.0 - " + reason: "Elasticsearch 8.11 did not support any spatial types" + + - do: + allowed_warnings_regex: + - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'from test | keep geo_point, geo_point_alias, point, geo_shape, shape' + + - match: { columns.0.name: geo_point } + - match: { columns.0.type: unsupported } + - match: { columns.1.name: geo_point_alias } + - match: { columns.1.type: unsupported } + - match: { columns.2.name: point } + - match: { columns.2.type: unsupported } + - match: { columns.3.name: geo_shape } + - match: { columns.3.type: unsupported } + - match: { columns.4.name: shape } + - match: { columns.4.type: unsupported } + + - length: { values: 1 } + - match: { values.0.0: null } + - match: { values.0.1: null } + - match: { values.0.2: null } + - match: { values.0.3: null } + - match: { values.0.4: null } From 56608c924f8a240053ac4759f5acff28915f7f39 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 30 Nov 2023 16:47:28 +0100 Subject: [PATCH 108/263] Adjust the maximum resolution used in GeoHexVisitorTests (#102804) --- .../search/aggregations/bucket/geogrid/GeoHexVisitorTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java index 0dbd6323b8493..3e0ec0fc9b769 100644 --- 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java @@ -51,7 +51,7 @@ private void doTestGeometry(LongFunction h3ToGeometry, boolean hasArea // we ignore polar cells are they are problematic and do not keep the relationships long h3 = randomValueOtherThanMany( l -> l == H3.geoToH3(90, 0, H3.getResolution(l)) || l == H3.geoToH3(-90, 0, H3.getResolution(l)), - () -> H3.geoToH3(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude(), randomIntBetween(2, 14)) + () -> H3.geoToH3(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude(), randomIntBetween(2, 13)) ); long centerChild = H3.childPosToH3(h3, 0); // children position 3 is chosen so we never use a polar polygon From 6b9dfab01ca69cebc8e2c22059f18754cbaee8c1 Mon Sep 17 00:00:00 2001 From: Pierre Gayvallet Date: Thu, 30 Nov 2023 17:01:21 +0100 Subject: [PATCH 109/263] Add memory utilization Kibana metric to the monitoring index templates (#102810) * Add memory utilization Kibana metric to the monitoring index templates * add changelog entry --- docs/changelog/102810.yaml | 5 +++ .../exporter/MonitoringTemplateUtils.java | 2 +- .../main/resources/monitoring-kibana-mb.json | 34 +++++++++++++++++++ .../MonitoringTemplateRegistry.java | 4 +-- 4 files changed, 42 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/102810.yaml diff --git a/docs/changelog/102810.yaml b/docs/changelog/102810.yaml new file mode 100644 index 0000000000000..f5faf7a321dbc --- /dev/null +++ b/docs/changelog/102810.yaml @@ -0,0 +1,5 @@ +pr: 102810 +summary: Add memory utilization Kibana metric to the monitoring index templates +area: Monitoring +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java index b600fec54b29b..a0e3421f470d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/exporter/MonitoringTemplateUtils.java @@ -18,7 +18,7 @@ public final class MonitoringTemplateUtils { *

    * It may be possible for this to diverge between templates and pipelines, but for now they're the same. */ - public static final int LAST_UPDATED_VERSION = 8_08_00_99; + public static final int LAST_UPDATED_VERSION = 8_12_00_99; /** * Current version of templates used in their name to differentiate from breaking changes (separate from product version). diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json index 9be7c09ec25cb..fe51468ca0b84 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-kibana-mb.json @@ -80,6 +80,20 @@ } } }, + "array_buffers": { + "properties": { + "bytes": { + "type": "long" + } + } + }, + "external": { + "properties": { + "bytes": { + "type": "long" + } + } + }, "heap": { "properties": { "total": { @@ -190,6 +204,26 @@ "platform": { "ignore_above": 1024, "type": "keyword" + }, + "cpuacct": { + "properties": { + "control_group": { + "type": "keyword" + }, + "usage_nanos": { + "type": "long" + } + } + }, + "cgroup_memory": { + "properties": { + "current_in_bytes": { + "type": "long" + }, + "swap_current_in_bytes": { + "type": "long" + } + } } } }, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index 442cd2479f87c..5ea99aeea4092 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -49,7 +49,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * continue to use the release version number in this registry, even though this 
is not standard practice for template * registries. */ - public static final int REGISTRY_VERSION = 8_08_00_99; + public static final int REGISTRY_VERSION = 8_12_00_99; private static final String REGISTRY_VERSION_VARIABLE = "xpack.monitoring.template.release.version"; /** @@ -77,7 +77,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 11; + public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 12; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; From b5471cdf867846719a7764196cf39819468b3d9f Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Thu, 30 Nov 2023 18:38:57 +0200 Subject: [PATCH 110/263] Active shards message corrected for search shards (#102808) This fixes the message returned to mention the search shards (that the logic considers when calculating the boolean to return). For example, if no search shard is active, the message was previously returning "(have 1, need 1)" because it was considering the primary shard. However it is now corrected to say "have 0" since the logic considers only search shards. 
Fixes #101896 --- docs/changelog/102808.yaml | 6 +++ .../action/support/ActiveShardCount.java | 32 ++++++++++---- .../replication/ReplicationOperation.java | 7 +-- .../ReplicationOperationTests.java | 43 ++++++++++++++++--- .../ClusterStateCreationUtils.java | 21 ++++++++- 5 files changed, 92 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/102808.yaml diff --git a/docs/changelog/102808.yaml b/docs/changelog/102808.yaml new file mode 100644 index 0000000000000..4e3df80a28319 --- /dev/null +++ b/docs/changelog/102808.yaml @@ -0,0 +1,6 @@ +pr: 102808 +summary: Active shards message corrected for search shards +area: Distributed +type: bug +issues: + - 101896 diff --git a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java index 34f0ce1a2fe2e..f6934e8d20ae6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java @@ -153,7 +153,7 @@ public boolean enoughShardsActive(final ClusterState clusterState, final String. waitForActiveShards = SETTING_WAIT_FOR_ACTIVE_SHARDS.get(indexMetadata.getSettings()); } for (int i = 0; i < indexRoutingTable.size(); i++) { - if (waitForActiveShards.enoughShardsActive(indexRoutingTable.shard(i)) == false) { + if (waitForActiveShards.enoughShardsActive(indexRoutingTable.shard(i)).enoughShards() == false) { // not enough active shard copies yet return false; } @@ -164,20 +164,36 @@ public boolean enoughShardsActive(final ClusterState clusterState, final String. } /** - * Returns true iff the active shard count in the shard routing table is enough - * to meet the required shard count represented by this instance. + * Record that captures the decision of {@link #enoughShardsActive(IndexShardRoutingTable)}. 
+ * @param enoughShards the decision of whether the active shard count is enough to meet the required shard count of this instance + * @param currentActiveShards the currently active shards considered for making the decision */ - public boolean enoughShardsActive(final IndexShardRoutingTable shardRoutingTable) { + public record EnoughShards(boolean enoughShards, int currentActiveShards) {}; + + /** + * Returns a {@link EnoughShards} record where the first value is true iff the active shard count in the shard routing table is enough + * to meet the required shard count represented by this instance, and the second value is the active shard count. + */ + public EnoughShards enoughShardsActive(final IndexShardRoutingTable shardRoutingTable) { final int activeShardCount = shardRoutingTable.activeShards().size(); + boolean enoughShards = false; + int currentActiveShards = activeShardCount; if (this == ActiveShardCount.ALL) { - return activeShardCount == shardRoutingTable.size(); + enoughShards = activeShardCount == shardRoutingTable.size(); } else if (value == 0) { - return true; + enoughShards = true; } else if (value == 1) { - return shardRoutingTable.hasSearchShards() ? 
shardRoutingTable.getActiveSearchShardCount() >= 1 : activeShardCount >= 1; + if (shardRoutingTable.hasSearchShards()) { + enoughShards = shardRoutingTable.getActiveSearchShardCount() >= 1; + currentActiveShards = shardRoutingTable.getActiveSearchShardCount(); + } else { + enoughShards = activeShardCount >= 1; + } } else { - return shardRoutingTable.getActiveSearchShardCount() >= value; + enoughShards = shardRoutingTable.getActiveSearchShardCount() >= value; + currentActiveShards = shardRoutingTable.getActiveSearchShardCount(); } + return new EnoughShards(enoughShards, currentActiveShards); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java index 1604ff81603ab..68cc02a613aae 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java @@ -430,7 +430,8 @@ protected String checkActiveShardCount() { return null; // not waiting for any shards } final IndexShardRoutingTable shardRoutingTable = primary.getReplicationGroup().getRoutingTable(); - if (waitForActiveShards.enoughShardsActive(shardRoutingTable)) { + ActiveShardCount.EnoughShards enoughShardsActive = waitForActiveShards.enoughShardsActive(shardRoutingTable); + if (enoughShardsActive.enoughShards()) { return null; } else { final String resolvedShards = waitForActiveShards == ActiveShardCount.ALL @@ -441,7 +442,7 @@ protected String checkActiveShardCount() { + "request [{}]", shardId, waitForActiveShards, - shardRoutingTable.activeShards().size(), + enoughShardsActive.currentActiveShards(), resolvedShards, opType, request @@ -449,7 +450,7 @@ protected String checkActiveShardCount() { return "Not enough active copies to meet shard count of [" + waitForActiveShards + "] (have " - + 
shardRoutingTable.activeShards().size() + + enoughShardsActive.currentActiveShards() + ", needed " + resolvedShards + ")."; diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index 16ced0ead53e8..c774620ea41d2 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.IndexShardNotStartedException; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ReplicationGroup; @@ -45,6 +46,7 @@ import java.net.InetAddress; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -65,7 +67,6 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class ReplicationOperationTests extends ESTestCase { @@ -439,10 +440,17 @@ public void testWaitForActiveShards() throws Exception { final int unassignedReplicas = randomInt(2); final int totalShards = 1 + assignedReplicas + unassignedReplicas; final int activeShardCount = randomIntBetween(0, totalShards); + final boolean unpromotableReplicas = randomBoolean(); Request request = new Request(shardId).waitForActiveShards( activeShardCount == totalShards ? 
ActiveShardCount.ALL : ActiveShardCount.from(activeShardCount) ); - final boolean passesActiveShardCheck = activeShardCount <= assignedReplicas + 1; + // In the case of unpromotables, only the search/replica assigned shards are calculated as active shards. But in other cases, or + // when the wait is for ALL active shards, ReplicationOperation#checkActiveShardCount() takes into account the primary shard as + // well, and that is why we need to increment the assigned replicas by 1 when calculating the actual active shards. + final int actualActiveShards = assignedReplicas + ((unpromotableReplicas && request.waitForActiveShards() != ActiveShardCount.ALL) + ? 0 + : 1); + final boolean passesActiveShardCheck = activeShardCount <= actualActiveShards; ShardRoutingState[] replicaStates = new ShardRoutingState[assignedReplicas + unassignedReplicas]; for (int i = 0; i < assignedReplicas; i++) { @@ -452,12 +460,26 @@ public void testWaitForActiveShards() throws Exception { replicaStates[i] = ShardRoutingState.UNASSIGNED; } - final ClusterState state = state(index, true, ShardRoutingState.STARTED, replicaStates); + final ClusterState state = state( + index, + true, + ShardRoutingState.STARTED, + unpromotableReplicas ? ShardRouting.Role.INDEX_ONLY : ShardRouting.Role.DEFAULT, + Arrays.stream(replicaStates) + .map( + shardRoutingState -> new Tuple<>( + shardRoutingState, + unpromotableReplicas ? ShardRouting.Role.SEARCH_ONLY : ShardRouting.Role.DEFAULT + ) + ) + .toList() + ); logger.debug( - "using active shard count of [{}], assigned shards [{}], total shards [{}]." + " expecting op to [{}]. using state: \n{}", + "using active shards [{}], assigned shards [{}], total shards [{}]. unpromotable [{}]. expecting op to [{}]. state: \n{}", request.waitForActiveShards(), 1 + assignedReplicas, 1 + assignedReplicas + unassignedReplicas, + unpromotableReplicas, passesActiveShardCheck ? 
"succeed" : "retry", state ); @@ -487,7 +509,18 @@ public void testWaitForActiveShards() throws Exception { op.execute(); assertTrue("operations should have been performed, active shard count is met", request.processedOnPrimary.get()); } else { - assertThat(op.checkActiveShardCount(), notNullValue()); + assertThat( + op.checkActiveShardCount(), + equalTo( + "Not enough active copies to meet shard count of [" + + request.waitForActiveShards() + + "] (have " + + actualActiveShards + + ", needed " + + activeShardCount + + ")." + ) + ); op.execute(); assertFalse("operations should not have been perform, active shard count is *NOT* met", request.processedOnPrimary.get()); assertListenerThrows("should throw exception to trigger retry", listener, UnavailableShardsException.class); diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index c40df091a4521..756757a162568 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -88,6 +88,25 @@ public static ClusterState state( boolean activePrimaryLocal, ShardRoutingState primaryState, List> replicaStates + ) { + return state(index, activePrimaryLocal, primaryState, ShardRouting.Role.DEFAULT, replicaStates); + } + + /** + * Creates cluster state with and index that has one shard and #(replicaStates) replicas with given roles + * + * @param index name of the index + * @param activePrimaryLocal if active primary should coincide with the local node in the cluster state + * @param primaryState state of primary + * @param primaryRole role of primary + * @param replicaStates states and roles of the replicas. 
length of this collection determines also the number of replicas + */ + public static ClusterState state( + String index, + boolean activePrimaryLocal, + ShardRoutingState primaryState, + ShardRouting.Role primaryRole, + List> replicaStates ) { assert primaryState == ShardRoutingState.STARTED || primaryState == ShardRoutingState.RELOCATING @@ -155,7 +174,7 @@ public static ClusterState state( unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null); } indexShardRoutingBuilder.addShard( - TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, true, primaryState, unassignedInfo) + TestShardRouting.newShardRouting(index, 0, primaryNode, relocatingNode, true, primaryState, unassignedInfo, primaryRole) ); for (var replicaState : replicaStates) { From a6c4445f184c7d96a3df2185f97cba3394f4b1dc Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 30 Nov 2023 17:49:15 +0100 Subject: [PATCH 111/263] Fix more leaked SearchResponse instances in tests (#102822) Couple more of these fixed for #102030 --- .../cluster/routing/ShardRoutingRoleIT.java | 28 ++--- .../SearchServiceCleanupOnLostMasterIT.java | 4 +- .../search/SearchWithRejectionsIT.java | 2 +- .../SignificantTermsSignificanceScoreIT.java | 3 +- .../CardinalityWithRequestBreakerIT.java | 2 +- .../bucket/ShardSizeTestCase.java | 19 +-- .../SharedSignificantTermsTestMethods.java | 38 +++--- .../FrozenSearchableSnapshotsIntegTests.java | 109 ++++++++++-------- ...tsBlobStoreCacheMaintenanceIntegTests.java | 55 +++++---- 9 files changed, 143 insertions(+), 117 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java index 3418874bd5902..8843e7ff39bc6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java @@ -68,6 +68,7 @@ import java.util.stream.IntStream; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -504,11 +505,13 @@ public void testSearchRouting() throws Exception { // do nothing } } - final var profileResults = search.get().getProfileResults(); - assertThat(profileResults, not(anEmptyMap())); - for (final var searchShardProfileKey : profileResults.keySet()) { - assertThat(searchShardProfileKeys, hasItem(searchShardProfileKey)); - } + assertResponse(search, resp -> { + final var profileResults = resp.getProfileResults(); + assertThat(profileResults, not(anEmptyMap())); + for (final var searchShardProfileKey : profileResults.keySet()) { + assertThat(searchShardProfileKeys, hasItem(searchShardProfileKey)); + } + }); } // Search with PIT for (int i = 0; i < 10; i++) { @@ -524,14 +527,13 @@ public void testSearchRouting() throws Exception { } String pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openRequest).actionGet().getPointInTimeId(); try { - final var profileResults = prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)) - .setProfile(true) - .get() - .getProfileResults(); - assertThat(profileResults, not(anEmptyMap())); - for (final var profileKey : profileResults.keySet()) { - assertThat(profileKey, in(searchShardProfileKeys)); - } + assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setProfile(true), response -> { + var profileResults = response.getProfileResults(); + assertThat(profileResults, not(anEmptyMap())); + for (final var profileKey : profileResults.keySet()) { + assertThat(profileKey, in(searchShardProfileKeys)); + } + }); } finally { 
client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java index 000dccdee34c6..398226e868d47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java @@ -21,6 +21,7 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; @@ -69,8 +70,7 @@ private void testLostMaster(CheckedBiConsumer loseMas index("test", "test", "{}"); - assertThat(prepareSearch("test").setScroll("30m").get().getScrollId(), is(notNullValue())); - + assertResponse(prepareSearch("test").setScroll("30m"), response -> assertThat(response.getScrollId(), is(notNullValue()))); loseMaster.accept(master, dataNode); // in the past, this failed because the search context for the scroll would prevent the shard lock from being released. 
ensureYellow(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java index 51c1269b87675..5030438f829fc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java @@ -52,7 +52,7 @@ public void testOpenContextsAfterRejections() throws Exception { } for (int i = 0; i < numSearches; i++) { try { - responses[i].get(); + responses[i].get().decRef(); } catch (Exception t) {} } assertBusy( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index b672325891b50..da1376a300728 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -237,8 +237,7 @@ public void testPopularTermManyDeletedDocs() throws Exception { ); } - request.get(); - + request.get().decRef(); } @Override diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index 8b1fa4abe09a5..5e086766dc711 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -53,7 +53,7 @@ public void testRequestBreaker() throws Exception { 
.collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) .order(BucketOrder.aggregation("cardinality", randomBoolean())) .subAggregation(cardinality("cardinality").precisionThreshold(randomLongBetween(1, 40000)).field("field1.keyword")) - ).get(); + ).get().decRef(); } catch (ElasticsearchException e) { if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) { throw e; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 549461418cabf..cfee97891aa32 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -19,6 +18,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.is; @@ -78,14 +78,15 @@ protected void indexData() throws Exception { indexRandom(true, docs); - SearchResponse resp = prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()).get(); - assertNoFailures(resp); - long totalOnOne = resp.getHits().getTotalHits().value; - assertThat(totalOnOne, is(15L)); - resp = prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()).get(); - assertNoFailures(resp); - long totalOnTwo = 
resp.getHits().getTotalHits().value; - assertThat(totalOnTwo, is(12L)); + assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { + long totalOnOne = resp.getHits().getTotalHits().value; + assertThat(totalOnOne, is(15L)); + }); + assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { + assertNoFailures(resp); + long totalOnTwo = resp.getHits().getTotalHits().value; + assertThat(totalOnTwo, is(12L)); + }); } protected List indexDoc(String shard, String key, int times) throws Exception { diff --git a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index dd93eb6b51ea6..0b84f14c56ecb 100644 --- a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -9,7 +9,6 @@ package org.elasticsearch.test.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; @@ -29,7 +28,7 @@ import static org.elasticsearch.test.ESIntegTestCase.client; import static org.elasticsearch.test.ESIntegTestCase.prepareSearch; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.equalTo; public class 
SharedSignificantTermsTestMethods { @@ -48,22 +47,25 @@ public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) } private static void checkSignificantTermsAggregationCorrect(ESIntegTestCase testCase) { - SearchResponse response = prepareSearch(INDEX_NAME).addAggregation( - terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)) - ).get(); - assertNoFailures(response); - StringTerms classes = response.getAggregations().get("class"); - Assert.assertThat(classes.getBuckets().size(), equalTo(2)); - for (Terms.Bucket classBucket : classes.getBuckets()) { - Map aggs = classBucket.getAggregations().asMap(); - Assert.assertTrue(aggs.containsKey("sig_terms")); - SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); - Assert.assertThat(agg.getBuckets().size(), equalTo(1)); - SignificantTerms.Bucket sigBucket = agg.iterator().next(); - String term = sigBucket.getKeyAsString(); - String classTerm = classBucket.getKeyAsString(); - Assert.assertTrue(term.equals(classTerm)); - } + assertNoFailuresAndResponse( + prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)) + ), + response -> { + StringTerms classes = response.getAggregations().get("class"); + Assert.assertThat(classes.getBuckets().size(), equalTo(2)); + for (Terms.Bucket classBucket : classes.getBuckets()) { + Map aggs = classBucket.getAggregations().asMap(); + Assert.assertTrue(aggs.containsKey("sig_terms")); + SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms"); + Assert.assertThat(agg.getBuckets().size(), equalTo(1)); + SignificantTerms.Bucket sigBucket = agg.iterator().next(); + String term = sigBucket.getKeyAsString(); + String classTerm = classBucket.getKeyAsString(); + Assert.assertTrue(term.equals(classTerm)); + } + } + ); } public static void index01Docs(String type, String settings, ESIntegTestCase testCase) throws ExecutionException, 
InterruptedException { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java index daf61ff9a4931..5ef524f8211c1 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -70,7 +69,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_RECOVERY_STATE_FACTORY_KEY; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; @@ -110,19 +109,25 @@ public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { populateIndex(indexName, 10_000); - final TotalHits originalAllHits = 
internalCluster().client() - .prepareSearch(indexName) - .setTrackTotalHits(true) - .get() - .getHits() - .getTotalHits(); - final TotalHits originalBarHits = internalCluster().client() + final TotalHits originalAllHits; + var originalResponse = internalCluster().client().prepareSearch(indexName).setTrackTotalHits(true).get(); + try { + originalAllHits = originalResponse.getHits().getTotalHits(); + } finally { + originalResponse.decRef(); + } + final TotalHits originalBarHits; + var barResponse = internalCluster().client() .prepareSearch(indexName) .setTrackTotalHits(true) .setQuery(matchQuery("foo", "bar")) - .get() - .getHits() - .getTotalHits(); + .get(); + try { + originalBarHits = barResponse.getHits().getTotalHits(); + } finally { + barResponse.decRef(); + } + logger.info("--> [{}] in total, of which [{}] match the query", originalAllHits, originalBarHits); expectThrows( @@ -462,25 +467,8 @@ public void testRequestCacheOnFrozen() throws Exception { // use a fixed client for the searches, as clients randomize timeouts, which leads to different cache entries Client client = client(); - final SearchResponse r1 = client.prepareSearch("test-index") - .setSize(0) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation( - dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0).calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); - assertNoFailures(r1); - - assertRequestCacheState(client(), "test-index", 0, 1); - - // The cached is actually used - assertThat( - indicesAdmin().prepareStats("test-index").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), - greaterThan(0L) - ); - - for (int i = 0; i < 10; ++i) { - final SearchResponse r2 = client.prepareSearch("test-index") + assertNoFailuresAndResponse( + client.prepareSearch("test-index") .setSize(0) .setSearchType(SearchType.QUERY_THEN_FETCH) .addAggregation( @@ -488,22 +476,51 @@ public void testRequestCacheOnFrozen() throws Exception { 
.timeZone(ZoneId.of("+01:00")) .minDocCount(0) .calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); - assertNoFailures(r2); - assertRequestCacheState(client(), "test-index", i + 1, 1); - Histogram h1 = r1.getAggregations().get("histo"); - Histogram h2 = r2.getAggregations().get("histo"); - final List buckets1 = h1.getBuckets(); - final List buckets2 = h2.getBuckets(); - assertEquals(buckets1.size(), buckets2.size()); - for (int j = 0; j < buckets1.size(); ++j) { - final Histogram.Bucket b1 = buckets1.get(j); - final Histogram.Bucket b2 = buckets2.get(j); - assertEquals(b1.getKey(), b2.getKey()); - assertEquals(b1.getDocCount(), b2.getDocCount()); + ), + r1 -> { + assertRequestCacheState(client(), "test-index", 0, 1); + + // The cached is actually used + assertThat( + indicesAdmin().prepareStats("test-index") + .setRequestCache(true) + .get() + .getTotal() + .getRequestCache() + .getMemorySizeInBytes(), + greaterThan(0L) + ); + + for (int i = 0; i < 10; ++i) { + final int idx = i; + assertNoFailuresAndResponse( + client.prepareSearch("test-index") + .setSize(0) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .addAggregation( + dateHistogram("histo").field("f") + .timeZone(ZoneId.of("+01:00")) + .minDocCount(0) + .calendarInterval(DateHistogramInterval.MONTH) + ), + r2 -> { + assertRequestCacheState(client(), "test-index", idx + 1, 1); + Histogram h1 = r1.getAggregations().get("histo"); + Histogram h2 = r2.getAggregations().get("histo"); + final List buckets1 = h1.getBuckets(); + final List buckets2 = h2.getBuckets(); + assertEquals(buckets1.size(), buckets2.size()); + for (int j = 0; j < buckets1.size(); ++j) { + final Histogram.Bucket b1 = buckets1.get(j); + final Histogram.Bucket b2 = buckets2.get(j); + assertEquals(b1.getKey(), b2.getKey()); + assertEquals(b1.getDocCount(), b2.getDocCount()); + } + } + ); + } } - } + ); // shut down shard and check that cache entries are actually removed indicesAdmin().prepareClose("test-index").get(); diff --git 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index c89df1edfa100..04233e47b7bcc 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -56,6 +56,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_BLOB_CACHE_INDEX; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_INDEX_ID_SETTING; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_INDEX_NAME_SETTING; @@ -166,28 +167,29 @@ public void testCleanUpAfterIndicesAreDeleted() throws Exception { for (String mountedIndex : mountedIndices.keySet()) { final Settings indexSettings = mountedIndices.get(mountedIndex).v1(); - final long remainingEntriesInCache = systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) - .setQuery( - BlobStoreCacheMaintenanceService.buildDeleteByQuery( - INDEX_NUMBER_OF_SHARDS_SETTING.get(indexSettings), - SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings), - SNAPSHOT_INDEX_ID_SETTING.get(indexSettings) + 
assertResponse( + systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) + .setQuery( + BlobStoreCacheMaintenanceService.buildDeleteByQuery( + INDEX_NUMBER_OF_SHARDS_SETTING.get(indexSettings), + SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings), + SNAPSHOT_INDEX_ID_SETTING.get(indexSettings) + ) ) - ) - .setSize(0) - .get() - .getHits() - .getTotalHits().value; - - if (indicesToDelete.contains(mountedIndex)) { - assertThat(remainingEntriesInCache, equalTo(0L)); - } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { - assertThat(remainingEntriesInCache, greaterThanOrEqualTo(mountedIndices.get(randomMountedIndex).v2())); - } else if (moreIndicesToDelete.contains(mountedIndex)) { - assertThat(remainingEntriesInCache, equalTo(0L)); - } else { - assertThat(remainingEntriesInCache, equalTo(mountedIndices.get(mountedIndex).v2())); - } + .setSize(0), + res -> { + final long remainingEntriesInCache = res.getHits().getTotalHits().value; + if (indicesToDelete.contains(mountedIndex)) { + assertThat(remainingEntriesInCache, equalTo(0L)); + } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { + assertThat(remainingEntriesInCache, greaterThanOrEqualTo(mountedIndices.get(randomMountedIndex).v2())); + } else if (moreIndicesToDelete.contains(mountedIndex)) { + assertThat(remainingEntriesInCache, equalTo(0L)); + } else { + assertThat(remainingEntriesInCache, equalTo(mountedIndices.get(mountedIndex).v2())); + } + } + ); } }); } @@ -316,13 +318,16 @@ private Client systemClient() { } private long numberOfEntriesInCache() { - return systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) + var res = systemClient().prepareSearch(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN) .setTrackTotalHits(true) .setSize(0) - .get() - .getHits() - .getTotalHits().value; + .get(); + try { + return res.getHits().getTotalHits().value; + } finally { + res.decRef(); + } } private void refreshSystemIndex(boolean 
failIfNotExist) { From d68670fb88586ca7dc14fb57da4380faf226bbcb Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 30 Nov 2023 17:57:31 +0100 Subject: [PATCH 112/263] Inline ActionType in RequestBuilder constructors (#102800) These are always constant values. We can inline them everywhere to save code and easy removing the subclasses of ActionType going forward. --- .../SearchTemplateRequestBuilder.java | 7 +- .../index/rankeval/RankEvalRequestIT.java | 26 +-- .../rankeval/RankEvalRequestBuilder.java | 5 +- .../documentation/ReindexDocumentationIT.java | 28 ++- .../index/reindex/CrossClusterReindexIT.java | 26 +-- .../reindex/RethrottleRequestBuilder.java | 5 +- .../ReindexFromRemoteWithAuthTests.java | 9 +- .../reindex/ReindexSingleNodeTests.java | 5 +- .../reindex/ReindexTestCase.java | 11 +- .../org/elasticsearch/reindex/RetryTests.java | 11 +- .../health/ClusterHealthRequestBuilder.java | 4 +- .../NodesHotThreadsRequestBuilder.java | 5 +- .../node/info/NodesInfoRequestBuilder.java | 5 +- ...desReloadSecureSettingsRequestBuilder.java | 4 +- .../node/stats/NodesStatsRequestBuilder.java | 5 +- .../cancel/CancelTasksRequestBuilder.java | 4 +- .../node/tasks/get/GetTaskRequestBuilder.java | 4 +- .../tasks/list/ListTasksRequestBuilder.java | 5 +- .../CleanupRepositoryRequestBuilder.java | 5 +- .../DeleteRepositoryRequestBuilder.java | 4 +- .../get/GetRepositoriesRequestBuilder.java | 4 +- .../put/PutRepositoryRequestBuilder.java | 4 +- .../VerifyRepositoryRequestBuilder.java | 4 +- .../reroute/ClusterRerouteRequestBuilder.java | 4 +- .../ClusterUpdateSettingsRequestBuilder.java | 4 +- .../ClusterSearchShardsRequestBuilder.java | 4 +- .../clone/CloneSnapshotRequestBuilder.java | 19 +- .../create/CreateSnapshotRequestBuilder.java | 4 +- .../delete/DeleteSnapshotRequestBuilder.java | 4 +- .../get/GetSnapshotsRequestBuilder.java | 4 +- .../RestoreSnapshotRequestBuilder.java | 4 +- .../status/SnapshotsStatusRequestBuilder.java | 8 +- 
.../state/ClusterStateRequestBuilder.java | 4 +- .../stats/ClusterStatsRequestBuilder.java | 4 +- .../DeleteStoredScriptRequestBuilder.java | 4 +- .../GetStoredScriptRequestBuilder.java | 4 +- .../PutStoredScriptRequestBuilder.java | 4 +- .../PendingClusterTasksRequestBuilder.java | 4 +- .../alias/IndicesAliasesRequestBuilder.java | 4 +- .../alias/get/GetAliasesRequestBuilder.java | 4 +- .../analyze/AnalyzeRequestBuilder.java | 8 +- .../ClearIndicesCacheRequestBuilder.java | 4 +- .../close/CloseIndexRequestBuilder.java | 4 +- .../create/CreateIndexRequestBuilder.java | 8 +- .../delete/DeleteIndexRequestBuilder.java | 4 +- .../indices/flush/FlushRequestBuilder.java | 4 +- .../forcemerge/ForceMergeRequestBuilder.java | 4 +- .../indices/get/GetIndexRequestBuilder.java | 4 +- .../get/GetFieldMappingsRequestBuilder.java | 4 +- .../get/GetMappingsRequestBuilder.java | 4 +- .../mapping/put/PutMappingRequestBuilder.java | 4 +- .../indices/open/OpenIndexRequestBuilder.java | 4 +- .../readonly/AddIndexBlockRequestBuilder.java | 4 +- .../recovery/RecoveryRequestBuilder.java | 4 +- .../refresh/RefreshRequestBuilder.java | 4 +- .../rollover/RolloverRequestBuilder.java | 4 +- .../IndicesSegmentsRequestBuilder.java | 4 +- .../get/GetSettingsRequestBuilder.java | 4 +- .../put/UpdateSettingsRequestBuilder.java | 4 +- .../IndicesShardStoreRequestBuilder.java | 5 +- .../indices/shrink/ResizeRequestBuilder.java | 5 +- .../stats/IndicesStatsRequestBuilder.java | 4 +- .../DeleteIndexTemplateRequestBuilder.java | 4 +- .../get/GetIndexTemplatesRequestBuilder.java | 4 +- .../put/PutIndexTemplateRequestBuilder.java | 4 +- .../query/ValidateQueryRequestBuilder.java | 4 +- .../action/bulk/BulkRequestBuilder.java | 8 +- .../action/delete/DeleteRequestBuilder.java | 4 +- .../action/explain/ExplainRequestBuilder.java | 5 +- .../FieldCapabilitiesRequestBuilder.java | 4 +- .../action/get/GetRequestBuilder.java | 8 +- .../action/get/MultiGetRequestBuilder.java | 4 +- 
.../action/index/IndexRequestBuilder.java | 8 +- .../ingest/DeletePipelineRequestBuilder.java | 4 +- .../ingest/GetPipelineRequestBuilder.java | 4 +- .../ingest/PutPipelineRequestBuilder.java | 10 +- .../SimulatePipelineRequestBuilder.java | 9 +- .../search/ClearScrollRequestBuilder.java | 5 +- .../search/MultiSearchRequestBuilder.java | 5 +- .../action/search/SearchRequestBuilder.java | 5 +- .../search/SearchScrollRequestBuilder.java | 9 +- .../MultiTermVectorsRequestBuilder.java | 4 +- .../TermVectorsRequestBuilder.java | 8 +- .../action/update/UpdateRequestBuilder.java | 8 +- .../internal/support/AbstractClient.java | 164 +++++++++--------- .../reindex/DeleteByQueryRequestBuilder.java | 10 +- .../index/reindex/ReindexRequestBuilder.java | 21 +-- .../reindex/UpdateByQueryRequestBuilder.java | 10 +- .../CreateIndexRequestBuilderTests.java | 4 +- .../get/TransportMultiGetActionTests.java | 4 +- .../index/IndexRequestBuilderTests.java | 2 +- .../search/SearchRequestBuilderTests.java | 2 +- .../TransportMultiTermVectorsActionTests.java | 4 +- .../vectors/KnnSearchRequestParserTests.java | 3 +- .../license/DeleteLicenseRequestBuilder.java | 11 +- .../license/PutLicenseRequestBuilder.java | 8 +- .../core/action/XPackInfoRequestBuilder.java | 6 +- .../action/GraphExploreRequestBuilder.java | 4 +- .../action/role/PutRoleRequestBuilder.java | 6 +- .../token/CreateTokenRequestBuilder.java | 5 +- .../action/user/DeleteUserRequestBuilder.java | 6 +- .../action/user/GetUsersRequestBuilder.java | 6 +- .../action/user/PutUserRequestBuilder.java | 6 +- .../ssl/action/GetCertificateInfoAction.java | 7 +- .../rest/RestGetCertificateInfoAction.java | 12 +- .../license/LicensesTransportTests.java | 33 ++-- .../core/ml/utils/MlIndexAndAliasTests.java | 6 +- .../xpack/eql/analysis/CancellationTests.java | 3 +- .../xpack/esql/action/EsqlActionIT.java | 5 +- .../xpack/esql/action/EsqlActionTaskIT.java | 4 +- .../esql/action/EsqlQueryRequestBuilder.java | 8 +- 
.../xpack/graph/test/GraphTests.java | 28 ++- .../registry/ModelRegistryTests.java | 3 +- .../ml/integration/MlNativeIntegTestCase.java | 8 +- .../aggregation/AggregationDataExtractor.java | 3 +- .../AggregationDataExtractorFactory.java | 3 +- .../chunked/ChunkedDataExtractor.java | 3 +- .../extractor/scroll/ScrollDataExtractor.java | 8 +- .../extractor/DataFrameDataExtractor.java | 6 +- ...ransportDeleteTrainedModelActionTests.java | 2 +- ...ompositeAggregationDataExtractorTests.java | 4 +- .../persistence/JobResultsProviderTests.java | 16 +- .../xpack/ml/utils/TaskRetrieverTests.java | 2 +- .../indices/IndexRecoveryCollectorTests.java | 9 +- .../indices/IndexStatsCollectorTests.java | 9 +- .../security/authc/ApiKeyIntegTests.java | 7 +- .../authc/apikey/ApiKeySingleNodeTests.java | 7 +- .../store/NativePrivilegeStoreCacheTests.java | 11 +- .../NativePrivilegeStoreSingleNodeTests.java | 11 +- ...ansportOpenIdConnectLogoutActionTests.java | 11 +- .../saml/TransportSamlLogoutActionTests.java | 14 +- .../TransportCreateTokenActionTests.java | 13 +- .../security/authc/ApiKeyServiceTests.java | 23 ++- .../authc/AuthenticationServiceTests.java | 13 +- .../security/authc/TokenServiceTests.java | 14 +- .../mapper/NativeRoleMappingStoreTests.java | 13 +- .../security/profile/ProfileServiceTests.java | 41 +---- .../xpack/security/test/SecurityMocks.java | 7 +- .../action/SqlClearCursorRequestBuilder.java | 4 +- .../sql/action/SqlQueryRequestBuilder.java | 6 +- .../action/SqlTranslateRequestBuilder.java | 6 +- .../sql/action/AsyncSqlSearchActionIT.java | 8 +- .../xpack/sql/action/SqlActionIT.java | 11 +- .../xpack/sql/action/SqlCancellationIT.java | 4 +- .../sql/action/SqlClearCursorActionIT.java | 18 +- .../xpack/sql/action/SqlLicenseIT.java | 15 +- .../sql/action/SqlSearchPageTimeoutIT.java | 4 +- .../sql/action/SqlTranslateActionIT.java | 5 +- .../xpack/sql/analysis/CancellationTests.java | 10 +- 149 files changed, 484 insertions(+), 759 deletions(-) diff --git 
a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java index 6bed28f84bdc8..5fb60b6d2a085 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequestBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.script.ScriptType; @@ -18,12 +17,8 @@ public class SearchTemplateRequestBuilder extends ActionRequestBuilder { - SearchTemplateRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new SearchTemplateRequest()); - } - public SearchTemplateRequestBuilder(ElasticsearchClient client) { - this(client, MustachePlugin.SEARCH_TEMPLATE_ACTION); + super(client, MustachePlugin.SEARCH_TEMPLATE_ACTION, new SearchTemplateRequest()); } public SearchTemplateRequestBuilder setRequest(SearchRequest searchRequest) { diff --git a/modules/rank-eval/src/internalClusterTest/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index 699cb307e3310..244c3f2188237 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -85,7 +85,7 @@ public void testPrecisionAtRequest() { PrecisionAtK metric = new PrecisionAtK(1, false, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = 
new RankEvalRequestBuilder(client(), RankEvalPlugin.ACTION, new RankEvalRequest()); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), new RankEvalRequest()); builder.setRankEvalSpec(task); String indexToUse = randomBoolean() ? TEST_INDEX : INDEX_ALIAS; @@ -130,7 +130,7 @@ public void testPrecisionAtRequest() { metric = new PrecisionAtK(1, false, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalPlugin.ACTION, new RankEvalRequest(task, new String[] { TEST_INDEX })); + builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); // if we look only at top 3 documente, the expected P@3 for the first query is @@ -162,11 +162,7 @@ public void testDCGRequest() { DiscountedCumulativeGain metric = new DiscountedCumulativeGain(false, null, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder( - client(), - RankEvalPlugin.ACTION, - new RankEvalRequest(task, new String[] { TEST_INDEX }) - ); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); RankEvalResponse response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getMetricScore(), 10E-14); @@ -175,7 +171,7 @@ public void testDCGRequest() { metric = new DiscountedCumulativeGain(false, null, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalPlugin.ACTION, new RankEvalRequest(task, new String[] { TEST_INDEX })); + builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); 
assertEquals(12.39278926071437, response.getMetricScore(), 10E-14); @@ -193,11 +189,7 @@ public void testMRRRequest() { MeanReciprocalRank metric = new MeanReciprocalRank(1, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder( - client(), - RankEvalPlugin.ACTION, - new RankEvalRequest(task, new String[] { TEST_INDEX }) - ); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); RankEvalResponse response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); // the expected reciprocal rank for the amsterdam_query is 1/5 @@ -210,7 +202,7 @@ public void testMRRRequest() { metric = new MeanReciprocalRank(1, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalPlugin.ACTION, new RankEvalRequest(task, new String[] { TEST_INDEX })); + builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); // limiting to top 3 results, the amsterdam_query has no relevant document in it @@ -238,11 +230,7 @@ public void testBadQuery() { RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); - RankEvalRequestBuilder builder = new RankEvalRequestBuilder( - client(), - RankEvalPlugin.ACTION, - new RankEvalRequest(task, new String[] { TEST_INDEX }) - ); + RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), new RankEvalRequest(task, new String[] { TEST_INDEX })); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalPlugin.ACTION, builder.request()).actionGet(); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java index 
0f56261f42c08..db80a7b598118 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java @@ -9,13 +9,12 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.ElasticsearchClient; public class RankEvalRequestBuilder extends ActionRequestBuilder { - public RankEvalRequestBuilder(ElasticsearchClient client, ActionType action, RankEvalRequest request) { - super(client, action, request); + public RankEvalRequestBuilder(ElasticsearchClient client, RankEvalRequest request) { + super(client, RankEvalPlugin.ACTION, request); } public void setRankEvalSpec(RankEvalSpec spec) { diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 996fbde85e474..46271f8c61e9c 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.index.reindex.AbstractBulkByScrollRequestBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.BulkByScrollTask; -import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequestBuilder; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequestBuilder; @@ -71,7 +70,7 @@ public void testReindex() { Client client = client(); // tag::reindex1 BulkByScrollResponse response = - new ReindexRequestBuilder(client, 
ReindexAction.INSTANCE) + new ReindexRequestBuilder(client) .source("source_index") .destination("target_index") .filter(QueryBuilders.matchQuery("category", "xzy")) // <1> @@ -88,7 +87,7 @@ public void testUpdateByQuery() { { // tag::update-by-query UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("source_index").abortOnVersionConflict(false); BulkByScrollResponse response = updateByQuery.get(); // end::update-by-query @@ -96,7 +95,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-filter UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("source_index") .filter(QueryBuilders.termQuery("level", "awesome")) .maxDocs(1000) @@ -113,7 +112,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-size UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("source_index") .source() .setSize(500); @@ -123,7 +122,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-sort UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("source_index") .maxDocs(100) .source() @@ -134,7 +133,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-script UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("source_index") .script(new Script( ScriptType.INLINE, @@ -155,7 +154,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-multi-index UpdateByQueryRequestBuilder updateByQuery = - new 
UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source("foo", "bar"); BulkByScrollResponse response = updateByQuery.get(); // end::update-by-query-multi-index @@ -163,7 +162,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-routing UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.source().setRouting("cat"); BulkByScrollResponse response = updateByQuery.get(); // end::update-by-query-routing @@ -171,7 +170,7 @@ public void testUpdateByQuery() { { // tag::update-by-query-pipeline UpdateByQueryRequestBuilder updateByQuery = - new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE); + new UpdateByQueryRequestBuilder(client); updateByQuery.setPipeline("hurray"); BulkByScrollResponse response = updateByQuery.get(); // end::update-by-query-pipeline @@ -216,7 +215,7 @@ public void testTasks() throws Exception { } { // tag::update-by-query-rethrottle - new RethrottleRequestBuilder(client, ReindexPlugin.RETHROTTLE_ACTION) + new RethrottleRequestBuilder(client) .setTargetTaskId(taskId) .setRequestsPerSecond(2.0f) .get(); @@ -234,7 +233,7 @@ public void testDeleteByQuery() { // tag::delete-by-query-sync BulkByScrollResponse response = - new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + new DeleteByQueryRequestBuilder(client) .filter(QueryBuilders.matchQuery("gender", "male")) // <1> .source("persons") // <2> .get(); // <3> @@ -242,7 +241,7 @@ public void testDeleteByQuery() { // end::delete-by-query-sync // tag::delete-by-query-async - new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE) + new DeleteByQueryRequestBuilder(client) .filter(QueryBuilders.matchQuery("gender", "male")) // <1> .source("persons") // <2> .execute(new ActionListener() { // <3> @@ -280,8 +279,7 @@ private ReindexRequestBuilder 
reindexAndPartiallyBlock() throws Exception { assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numDocs); assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); - ReindexRequestBuilder builder = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source(INDEX_NAME) - .destination("target_index"); + ReindexRequestBuilder builder = new ReindexRequestBuilder(client).source(INDEX_NAME).destination("target_index"); // Scroll by 1 so that cancellation is easier to control builder.source().setSize(1); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index 2c4f9fa574f38..50dea29683540 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -57,9 +57,7 @@ public void testReindexFromRemoteGivenIndexExists() throws Exception { final int docsNumber = indexDocs(client(REMOTE_CLUSTER), "source-index-001"); final String sourceIndexInRemote = REMOTE_CLUSTER + ":" + "source-index-001"; - new ReindexRequestBuilder(client(LOCAL_CLUSTER), ReindexAction.INSTANCE).source(sourceIndexInRemote) - .destination("desc-index-001") - .get(); + new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") @@ -76,9 +74,7 @@ public void testReindexFromRemoteGivenSameIndexNames() throws Exception { final int docsNumber = indexDocs(client(REMOTE_CLUSTER), "test-index-001"); final String sourceIndexInRemote = REMOTE_CLUSTER + ":" + "test-index-001"; - new ReindexRequestBuilder(client(LOCAL_CLUSTER), 
ReindexAction.INSTANCE).source(sourceIndexInRemote) - .destination("test-index-001") - .get(); + new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("test-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("test-index-001") @@ -99,9 +95,9 @@ public void testReindexManyTimesFromRemoteGivenSameIndexNames() throws Exception int N = randomIntBetween(2, 10); for (int attempt = 0; attempt < N; attempt++) { - BulkByScrollResponse response = new ReindexRequestBuilder(client(LOCAL_CLUSTER), ReindexAction.INSTANCE).source( - sourceIndexInRemote - ).destination("test-index-001").get(); + BulkByScrollResponse response = new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote) + .destination("test-index-001") + .get(); if (attempt == 0) { assertThat(response.getCreated(), equalTo(docsNumber)); @@ -127,9 +123,7 @@ public void testReindexFromRemoteThrowOnUnavailableIndex() throws Exception { final String sourceIndexInRemote = REMOTE_CLUSTER + ":" + "no-such-source-index-001"; expectThrows( IndexNotFoundException.class, - () -> new ReindexRequestBuilder(client(LOCAL_CLUSTER), ReindexAction.INSTANCE).source(sourceIndexInRemote) - .destination("desc-index-001") - .get() + () -> new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get() ); // assert that local index was not created either @@ -145,9 +139,7 @@ public void testReindexFromRemoteGivenSimpleDateMathIndexName() throws Interrupt final int docsNumber = indexDocs(client(REMOTE_CLUSTER), "datemath-2001-01-02"); final String sourceIndexInRemote = REMOTE_CLUSTER + ":" + ""; - new ReindexRequestBuilder(client(LOCAL_CLUSTER), ReindexAction.INSTANCE).source(sourceIndexInRemote) - .destination("desc-index-001") - .get(); + new 
ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") @@ -165,9 +157,7 @@ public void testReindexFromRemoteGivenComplexDateMathIndexName() throws Interrup // Remote name contains `:` symbol twice final String sourceIndexInRemote = REMOTE_CLUSTER + ":" + ""; - new ReindexRequestBuilder(client(LOCAL_CLUSTER), ReindexAction.INSTANCE).source(sourceIndexInRemote) - .destination("desc-index-001") - .get(); + new ReindexRequestBuilder(client(LOCAL_CLUSTER)).source(sourceIndexInRemote).destination("desc-index-001").get(); assertTrue("Number of documents in source and desc indexes does not match", waitUntil(() -> { SearchResponse resp = client(LOCAL_CLUSTER).prepareSearch("desc-index-001") diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/RethrottleRequestBuilder.java index 28f084d59f1f6..bf5c68882e3dc 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/RethrottleRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/RethrottleRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.reindex; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.tasks.TasksRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -17,8 +16,8 @@ * Java API support for changing the throttle on reindex tasks while they are running. 
*/ public class RethrottleRequestBuilder extends TasksRequestBuilder { - public RethrottleRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new RethrottleRequest()); + public RethrottleRequestBuilder(ElasticsearchClient client) { + super(client, ReindexPlugin.RETHROTTLE_ACTION, new RethrottleRequest()); } /** diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java index a68c390b8bd80..c641e04f84134 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.http.HttpInfo; -import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequestBuilder; import org.elasticsearch.index.reindex.RemoteInfo; import org.elasticsearch.plugins.ActionPlugin; @@ -109,14 +108,14 @@ private RemoteInfo newRemoteInfo(String username, String password, Map request.get()); @@ -125,7 +124,7 @@ public void testReindexSendsHeaders() throws Exception { } public void testReindexWithoutAuthenticationWhenRequired() throws Exception { - ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + ReindexRequestBuilder request = new ReindexRequestBuilder(client()).source("source") .destination("dest") .setRemoteInfo(newRemoteInfo(null, null, emptyMap())); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> request.get()); @@ -135,7 +134,7 @@ public void testReindexWithoutAuthenticationWhenRequired() throws Exception { } public void testReindexWithBadAuthentication() throws Exception { - 
ReindexRequestBuilder request = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") + ReindexRequestBuilder request = new ReindexRequestBuilder(client()).source("source") .destination("dest") .setRemoteInfo(newRemoteInfo("junk", "auth", emptyMap())); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> request.get()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java index 0804cccd8b8f2..855cb1863f399 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexSingleNodeTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.reindex; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequestBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.sort.SortOrder; @@ -38,9 +37,7 @@ public void testDeprecatedSort() { // Copy a subset of the docs sorted int subsetSize = randomIntBetween(1, max - 1); - ReindexRequestBuilder copy = new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("source") - .destination("dest") - .refresh(true); + ReindexRequestBuilder copy = new ReindexRequestBuilder(client()).source("source").destination("dest").refresh(true); copy.maxDocs(subsetSize); copy.request().addSortField("foo", SortOrder.DESC); assertThat(copy.get(), matcher().created(subsetSize)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexTestCase.java index cf6eb4b8aa888..129d23c010917 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexTestCase.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexTestCase.java @@ -9,11 +9,8 @@ package org.elasticsearch.reindex; import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest; -import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequestBuilder; -import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequestBuilder; -import org.elasticsearch.index.reindex.UpdateByQueryAction; import org.elasticsearch.index.reindex.UpdateByQueryRequestBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -38,19 +35,19 @@ protected Collection> nodePlugins() { } protected ReindexRequestBuilder reindex() { - return new ReindexRequestBuilder(client(), ReindexAction.INSTANCE); + return new ReindexRequestBuilder(client()); } protected UpdateByQueryRequestBuilder updateByQuery() { - return new UpdateByQueryRequestBuilder(client(), UpdateByQueryAction.INSTANCE); + return new UpdateByQueryRequestBuilder(client()); } protected DeleteByQueryRequestBuilder deleteByQuery() { - return new DeleteByQueryRequestBuilder(client(), DeleteByQueryAction.INSTANCE); + return new DeleteByQueryRequestBuilder(client()); } protected RethrottleRequestBuilder rethrottle() { - return new RethrottleRequestBuilder(client(), ReindexPlugin.RETHROTTLE_ACTION); + return new RethrottleRequestBuilder(client()); } public static BulkIndexByScrollResponseMatcher matcher() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java index 36da25685a7ba..36fe08afc1ef5 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/RetryTests.java @@ -97,7 +97,7 @@ final Settings nodeSettings() { public void testReindex() throws Exception { testCase( ReindexAction.NAME, - 
client -> new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest"), + client -> new ReindexRequestBuilder(client).source("source").destination("dest"), matcher().created(DOC_COUNT) ); } @@ -129,9 +129,7 @@ public void testReindexFromRemote() throws Exception { RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT ); - ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source") - .destination("dest") - .setRemoteInfo(remote); + ReindexRequestBuilder request = new ReindexRequestBuilder(client).source("source").destination("dest").setRemoteInfo(remote); return request; }; testCase(ReindexAction.NAME, function, matcher().created(DOC_COUNT)); @@ -140,7 +138,7 @@ public void testReindexFromRemote() throws Exception { public void testUpdateByQuery() throws Exception { testCase( UpdateByQueryAction.NAME, - client -> new UpdateByQueryRequestBuilder(client, UpdateByQueryAction.INSTANCE).source("source"), + client -> new UpdateByQueryRequestBuilder(client).source("source"), matcher().updated(DOC_COUNT) ); } @@ -148,8 +146,7 @@ public void testUpdateByQuery() throws Exception { public void testDeleteByQuery() throws Exception { testCase( DeleteByQueryAction.NAME, - client -> new DeleteByQueryRequestBuilder(client, DeleteByQueryAction.INSTANCE).source("source") - .filter(QueryBuilders.matchAllQuery()), + client -> new DeleteByQueryRequestBuilder(client).source("source").filter(QueryBuilders.matchAllQuery()), matcher().deleted(DOC_COUNT) ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestBuilder.java index 0d39ecb8f227a..e69c8862a4885 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequestBuilder.java @@ -21,8 +21,8 @@ public class ClusterHealthRequestBuilder extends MasterNodeReadOperationRequestB ClusterHealthResponse, ClusterHealthRequestBuilder> { - public ClusterHealthRequestBuilder(ElasticsearchClient client, ClusterHealthAction action) { - super(client, action, new ClusterHealthRequest()); + public ClusterHealthRequestBuilder(ElasticsearchClient client) { + super(client, ClusterHealthAction.INSTANCE, new ClusterHealthRequest()); } public ClusterHealthRequestBuilder setIndices(String... indices) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java index a1276e164741c..6593b90fb7f65 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.node.hotthreads; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.TimeValue; @@ -19,8 +18,8 @@ public class NodesHotThreadsRequestBuilder extends NodesOperationRequestBuilder< NodesHotThreadsResponse, NodesHotThreadsRequestBuilder> { - public NodesHotThreadsRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new NodesHotThreadsRequest()); + public NodesHotThreadsRequestBuilder(ElasticsearchClient client) { + super(client, TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()); } public NodesHotThreadsRequestBuilder setThreads(int threads) { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java index 52f3ced207be3..ad6233717a334 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequestBuilder.java @@ -8,15 +8,14 @@ package org.elasticsearch.action.admin.cluster.node.info; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; // TODO: This class's interface should match that of NodesInfoRequest public class NodesInfoRequestBuilder extends NodesOperationRequestBuilder { - public NodesInfoRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new NodesInfoRequest()); + public NodesInfoRequestBuilder(ElasticsearchClient client) { + super(client, TransportNodesInfoAction.TYPE, new NodesInfoRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java index 4f24593cbb55b..95c5d53ad7fbc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java @@ -20,8 +20,8 @@ public class NodesReloadSecureSettingsRequestBuilder extends NodesOperationReque NodesReloadSecureSettingsResponse, NodesReloadSecureSettingsRequestBuilder> { - public NodesReloadSecureSettingsRequestBuilder(ElasticsearchClient client, NodesReloadSecureSettingsAction action) { - super(client, action, new 
NodesReloadSecureSettingsRequest()); + public NodesReloadSecureSettingsRequestBuilder(ElasticsearchClient client) { + super(client, NodesReloadSecureSettingsAction.INSTANCE, new NodesReloadSecureSettingsRequest()); } public NodesReloadSecureSettingsRequestBuilder setSecureStorePassword(SecureString secureStorePassword) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java index 48f90ccc712fd..ab7278c629bf2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -18,8 +17,8 @@ public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder< NodesStatsResponse, NodesStatsRequestBuilder> { - public NodesStatsRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new NodesStatsRequest()); + public NodesStatsRequestBuilder(ElasticsearchClient client) { + super(client, TransportNodesStatsAction.TYPE, new NodesStatsRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java index e41fa85e843f7..45fc4e352a4ba 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java @@ -16,8 +16,8 @@ */ public class CancelTasksRequestBuilder extends TasksRequestBuilder { - public CancelTasksRequestBuilder(ElasticsearchClient client, CancelTasksAction action) { - super(client, action, new CancelTasksRequest()); + public CancelTasksRequestBuilder(ElasticsearchClient client) { + super(client, CancelTasksAction.INSTANCE, new CancelTasksRequest()); } public CancelTasksRequestBuilder waitForCompletion(boolean waitForCompletion) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java index 01f7a201a8c5d..49eacd0996111 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java @@ -17,8 +17,8 @@ * Builder for the request to retrieve the list of tasks running on the specified nodes */ public class GetTaskRequestBuilder extends ActionRequestBuilder { - public GetTaskRequestBuilder(ElasticsearchClient client, GetTaskAction action) { - super(client, action, new GetTaskRequest()); + public GetTaskRequestBuilder(ElasticsearchClient client) { + super(client, GetTaskAction.INSTANCE, new GetTaskRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java index cf50f9bca581f..f12b5cf6e202e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestBuilder.java @@ -8,7 +8,6 @@ package 
org.elasticsearch.action.admin.cluster.node.tasks.list; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.tasks.TasksRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -17,8 +16,8 @@ */ public class ListTasksRequestBuilder extends TasksRequestBuilder { - public ListTasksRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new ListTasksRequest()); + public ListTasksRequestBuilder(ElasticsearchClient client) { + super(client, TransportListTasksAction.TYPE, new ListTasksRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java index 83064a6348d4c..680502c783a8b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.admin.cluster.repositories.cleanup; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -16,8 +15,8 @@ public class CleanupRepositoryRequestBuilder extends MasterNodeOperationRequestB CleanupRepositoryResponse, CleanupRepositoryRequestBuilder> { - public CleanupRepositoryRequestBuilder(ElasticsearchClient client, ActionType action, String repository) { - super(client, action, new CleanupRepositoryRequest(repository)); + public CleanupRepositoryRequestBuilder(ElasticsearchClient client, String repository) { + super(client, CleanupRepositoryAction.INSTANCE, new CleanupRepositoryRequest(repository)); } public CleanupRepositoryRequestBuilder setName(String repository) { 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java index 412a34a6e1562..e2f614246b81c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryRequestBuilder.java @@ -23,8 +23,8 @@ public class DeleteRepositoryRequestBuilder extends AcknowledgedRequestBuilder< /** * Constructs unregister repository request builder with specified repository name */ - public DeleteRepositoryRequestBuilder(ElasticsearchClient client, DeleteRepositoryAction action, String name) { - super(client, action, new DeleteRepositoryRequest(name)); + public DeleteRepositoryRequestBuilder(ElasticsearchClient client, String name) { + super(client, DeleteRepositoryAction.INSTANCE, new DeleteRepositoryRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequestBuilder.java index 6a0d4a5e126f1..f9d0c429c2ee8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesRequestBuilder.java @@ -23,8 +23,8 @@ public class GetRepositoriesRequestBuilder extends MasterNodeReadOperationReques /** * Creates new get repository request builder */ - public GetRepositoriesRequestBuilder(ElasticsearchClient client, GetRepositoriesAction action, String... 
repositories) { - super(client, action, new GetRepositoriesRequest(repositories)); + public GetRepositoriesRequestBuilder(ElasticsearchClient client, String... repositories) { + super(client, GetRepositoriesAction.INSTANCE, new GetRepositoriesRequest(repositories)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java index 21401ba986674..79195725ad962 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestBuilder.java @@ -27,8 +27,8 @@ public class PutRepositoryRequestBuilder extends AcknowledgedRequestBuilder< /** * Constructs register repository request for the repository with a given name */ - public PutRepositoryRequestBuilder(ElasticsearchClient client, PutRepositoryAction action, String name) { - super(client, action, new PutRepositoryRequest(name)); + public PutRepositoryRequestBuilder(ElasticsearchClient client, String name) { + super(client, PutRepositoryAction.INSTANCE, new PutRepositoryRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryRequestBuilder.java index dc6257b222ab2..798fad15734ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryRequestBuilder.java @@ -22,8 +22,8 @@ public class VerifyRepositoryRequestBuilder extends MasterNodeOperationRequestBu /** * Constructs unregister repository request builder with specified 
repository name */ - public VerifyRepositoryRequestBuilder(ElasticsearchClient client, VerifyRepositoryAction action, String name) { - super(client, action, new VerifyRepositoryRequest(name)); + public VerifyRepositoryRequestBuilder(ElasticsearchClient client, String name) { + super(client, VerifyRepositoryAction.INSTANCE, new VerifyRepositoryRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java index f8e5282df6fcd..1926378dba791 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestBuilder.java @@ -19,8 +19,8 @@ public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder< ClusterRerouteRequest, ClusterRerouteResponse, ClusterRerouteRequestBuilder> { - public ClusterRerouteRequestBuilder(ElasticsearchClient client, ClusterRerouteAction action) { - super(client, action, new ClusterRerouteRequest()); + public ClusterRerouteRequestBuilder(ElasticsearchClient client) { + super(client, ClusterRerouteAction.INSTANCE, new ClusterRerouteRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java index 8f0a6e3f2a814..31ab3223bbb49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java @@ -23,8 +23,8 @@ public class ClusterUpdateSettingsRequestBuilder extends AcknowledgedRequestBuil ClusterUpdateSettingsResponse, ClusterUpdateSettingsRequestBuilder> { 
- public ClusterUpdateSettingsRequestBuilder(ElasticsearchClient client, ClusterUpdateSettingsAction action) { - super(client, action, new ClusterUpdateSettingsRequest()); + public ClusterUpdateSettingsRequestBuilder(ElasticsearchClient client) { + super(client, ClusterUpdateSettingsAction.INSTANCE, new ClusterUpdateSettingsRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java index 53e0103d5360d..9f11a01fc4073 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestBuilder.java @@ -17,8 +17,8 @@ public class ClusterSearchShardsRequestBuilder extends MasterNodeReadOperationRe ClusterSearchShardsResponse, ClusterSearchShardsRequestBuilder> { - public ClusterSearchShardsRequestBuilder(ElasticsearchClient client, ClusterSearchShardsAction action) { - super(client, action, new ClusterSearchShardsRequest()); + public ClusterSearchShardsRequestBuilder(ElasticsearchClient client) { + super(client, ClusterSearchShardsAction.INSTANCE, new ClusterSearchShardsRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java index 1275a8cbfdfaa..efa4c4895a12e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/clone/CloneSnapshotRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.clone; -import org.elasticsearch.action.ActionType; import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; @@ -20,22 +19,8 @@ public class CloneSnapshotRequestBuilder extends MasterNodeOperationRequestBuild AcknowledgedResponse, CloneSnapshotRequestBuilder> { - protected CloneSnapshotRequestBuilder( - ElasticsearchClient client, - ActionType action, - CloneSnapshotRequest request - ) { - super(client, action, request); - } - - public CloneSnapshotRequestBuilder( - ElasticsearchClient client, - ActionType action, - String repository, - String source, - String target - ) { - this(client, action, new CloneSnapshotRequest(repository, source, target, Strings.EMPTY_ARRAY)); + public CloneSnapshotRequestBuilder(ElasticsearchClient client, String repository, String source, String target) { + super(client, CloneSnapshotAction.INSTANCE, new CloneSnapshotRequest(repository, source, target, Strings.EMPTY_ARRAY)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java index ae6ec9a5b3c49..b2cd01b4d2016 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestBuilder.java @@ -26,8 +26,8 @@ public class CreateSnapshotRequestBuilder extends MasterNodeOperationRequestBuil /** * Constructs a new create snapshot request builder with specified repository and snapshot names */ - public CreateSnapshotRequestBuilder(ElasticsearchClient client, CreateSnapshotAction action, String repository, String snapshot) { - super(client, action, new CreateSnapshotRequest(repository, snapshot)); + public CreateSnapshotRequestBuilder(ElasticsearchClient 
client, String repository, String snapshot) { + super(client, CreateSnapshotAction.INSTANCE, new CreateSnapshotRequest(repository, snapshot)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java index 4046c0bc7dd03..8d2c8997b42e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequestBuilder.java @@ -23,8 +23,8 @@ public class DeleteSnapshotRequestBuilder extends MasterNodeOperationRequestBuil /** * Constructs delete snapshot request builder with specified repository and snapshot names */ - public DeleteSnapshotRequestBuilder(ElasticsearchClient client, DeleteSnapshotAction action, String repository, String... snapshots) { - super(client, action, new DeleteSnapshotRequest(repository, snapshots)); + public DeleteSnapshotRequestBuilder(ElasticsearchClient client, String repository, String... 
snapshots) { + super(client, DeleteSnapshotAction.INSTANCE, new DeleteSnapshotRequest(repository, snapshots)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java index 1688567975b6d..eadbaa8aa0952 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequestBuilder.java @@ -25,8 +25,8 @@ public class GetSnapshotsRequestBuilder extends MasterNodeOperationRequestBuilde /** * Constructs the new get snapshot request with specified repositories */ - public GetSnapshotsRequestBuilder(ElasticsearchClient client, GetSnapshotsAction action, String... repositories) { - super(client, action, new GetSnapshotsRequest(repositories)); + public GetSnapshotsRequestBuilder(ElasticsearchClient client, String... 
repositories) { + super(client, GetSnapshotsAction.INSTANCE, new GetSnapshotsRequest(repositories)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java index 49cc5df049332..0dad986a86ab6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java @@ -28,8 +28,8 @@ public class RestoreSnapshotRequestBuilder extends MasterNodeOperationRequestBui /** * Constructs new restore snapshot request builder with specified repository and snapshot names */ - public RestoreSnapshotRequestBuilder(ElasticsearchClient client, RestoreSnapshotAction action, String repository, String name) { - super(client, action, new RestoreSnapshotRequest(repository, name)); + public RestoreSnapshotRequestBuilder(ElasticsearchClient client, String repository, String name) { + super(client, RestoreSnapshotAction.INSTANCE, new RestoreSnapshotRequest(repository, name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java index 7f64f1bd6b76d..26caf4307686b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusRequestBuilder.java @@ -23,15 +23,15 @@ public class SnapshotsStatusRequestBuilder extends MasterNodeOperationRequestBui /** * Constructs the new snapshot status request */ - public SnapshotsStatusRequestBuilder(ElasticsearchClient client, SnapshotsStatusAction action) 
{ - super(client, action, new SnapshotsStatusRequest()); + public SnapshotsStatusRequestBuilder(ElasticsearchClient client) { + super(client, SnapshotsStatusAction.INSTANCE, new SnapshotsStatusRequest()); } /** * Constructs the new snapshot status request with specified repository */ - public SnapshotsStatusRequestBuilder(ElasticsearchClient client, SnapshotsStatusAction action, String repository) { - super(client, action, new SnapshotsStatusRequest(repository)); + public SnapshotsStatusRequestBuilder(ElasticsearchClient client, String repository) { + super(client, SnapshotsStatusAction.INSTANCE, new SnapshotsStatusRequest(repository)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestBuilder.java index f24d013794a42..f8e49fbd4afc6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestBuilder.java @@ -19,8 +19,8 @@ public class ClusterStateRequestBuilder extends MasterNodeReadOperationRequestBu ClusterStateResponse, ClusterStateRequestBuilder> { - public ClusterStateRequestBuilder(ElasticsearchClient client, ClusterStateAction action) { - super(client, action, new ClusterStateRequest()); + public ClusterStateRequestBuilder(ElasticsearchClient client) { + super(client, ClusterStateAction.INSTANCE, new ClusterStateRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java index 0213c3d2861ac..e82132b59e3fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequestBuilder.java @@ -16,7 +16,7 @@ public class ClusterStatsRequestBuilder extends NodesOperationRequestBuilder< ClusterStatsResponse, ClusterStatsRequestBuilder> { - public ClusterStatsRequestBuilder(ElasticsearchClient client, ClusterStatsAction action) { - super(client, action, new ClusterStatsRequest()); + public ClusterStatsRequestBuilder(ElasticsearchClient client) { + super(client, ClusterStatsAction.INSTANCE, new ClusterStatsRequest()); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java index e5def5f928a66..d8f22216073a5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptRequestBuilder.java @@ -17,8 +17,8 @@ public class DeleteStoredScriptRequestBuilder extends AcknowledgedRequestBuilder AcknowledgedResponse, DeleteStoredScriptRequestBuilder> { - public DeleteStoredScriptRequestBuilder(ElasticsearchClient client, DeleteStoredScriptAction action) { - super(client, action, new DeleteStoredScriptRequest()); + public DeleteStoredScriptRequestBuilder(ElasticsearchClient client) { + super(client, DeleteStoredScriptAction.INSTANCE, new DeleteStoredScriptRequest()); } public DeleteStoredScriptRequestBuilder setId(String id) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestBuilder.java index 29343324b268c..3a3bda5be016a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestBuilder.java @@ -16,8 +16,8 @@ public class GetStoredScriptRequestBuilder extends MasterNodeReadOperationReques GetStoredScriptResponse, GetStoredScriptRequestBuilder> { - public GetStoredScriptRequestBuilder(ElasticsearchClient client, GetStoredScriptAction action) { - super(client, action, new GetStoredScriptRequest()); + public GetStoredScriptRequestBuilder(ElasticsearchClient client) { + super(client, GetStoredScriptAction.INSTANCE, new GetStoredScriptRequest()); } public GetStoredScriptRequestBuilder setId(String id) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java index a4a5543a90ce8..24f5900629cfb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestBuilder.java @@ -19,8 +19,8 @@ public class PutStoredScriptRequestBuilder extends AcknowledgedRequestBuilder< AcknowledgedResponse, PutStoredScriptRequestBuilder> { - public PutStoredScriptRequestBuilder(ElasticsearchClient client, PutStoredScriptAction action) { - super(client, action, new PutStoredScriptRequest()); + public PutStoredScriptRequestBuilder(ElasticsearchClient client) { + super(client, PutStoredScriptAction.INSTANCE, new PutStoredScriptRequest()); } public PutStoredScriptRequestBuilder setId(String id) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java index 10d84e9f9e603..4de6d262a06e2 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java @@ -16,7 +16,7 @@ public class PendingClusterTasksRequestBuilder extends MasterNodeReadOperationRe PendingClusterTasksResponse, PendingClusterTasksRequestBuilder> { - public PendingClusterTasksRequestBuilder(ElasticsearchClient client, PendingClusterTasksAction action) { - super(client, action, new PendingClusterTasksRequest()); + public PendingClusterTasksRequestBuilder(ElasticsearchClient client) { + super(client, PendingClusterTasksAction.INSTANCE, new PendingClusterTasksRequest()); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java index ac4c0f6a75d9c..47aaf53bd1c98 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequestBuilder.java @@ -24,8 +24,8 @@ public class IndicesAliasesRequestBuilder extends AcknowledgedRequestBuilder< AcknowledgedResponse, IndicesAliasesRequestBuilder> { - public IndicesAliasesRequestBuilder(ElasticsearchClient client, IndicesAliasesAction action) { - super(client, action, new IndicesAliasesRequest()); + public IndicesAliasesRequestBuilder(ElasticsearchClient client) { + super(client, IndicesAliasesAction.INSTANCE, new IndicesAliasesRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequestBuilder.java index a7fd1f02f4650..c64ef8a9b5437 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequestBuilder.java 
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequestBuilder.java @@ -12,7 +12,7 @@ public class GetAliasesRequestBuilder extends BaseAliasesRequestBuilder { - public GetAliasesRequestBuilder(ElasticsearchClient client, GetAliasesAction action, String... aliases) { - super(client, action, aliases); + public GetAliasesRequestBuilder(ElasticsearchClient client, String... aliases) { + super(client, GetAliasesAction.INSTANCE, aliases); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index fe07ce83da578..708ba01019146 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -17,12 +17,12 @@ public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder< AnalyzeAction.Response, AnalyzeRequestBuilder> { - public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) { - super(client, action, new AnalyzeAction.Request()); + public AnalyzeRequestBuilder(ElasticsearchClient client) { + super(client, AnalyzeAction.INSTANCE, new AnalyzeAction.Request()); } - public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String... text) { - super(client, action, new AnalyzeAction.Request(index).text(text)); + public AnalyzeRequestBuilder(ElasticsearchClient client, String index, String... 
text) { + super(client, AnalyzeAction.INSTANCE, new AnalyzeAction.Request(index).text(text)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java index 8610b4ef8ce7c..464c22d1119b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java @@ -16,8 +16,8 @@ public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBu ClearIndicesCacheResponse, ClearIndicesCacheRequestBuilder> { - public ClearIndicesCacheRequestBuilder(ElasticsearchClient client, ClearIndicesCacheAction action) { - super(client, action, new ClearIndicesCacheRequest()); + public ClearIndicesCacheRequestBuilder(ElasticsearchClient client) { + super(client, ClearIndicesCacheAction.INSTANCE, new ClearIndicesCacheRequest()); } public ClearIndicesCacheRequestBuilder setQueryCache(boolean queryCache) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestBuilder.java index 3d6bf0ff15bb1..0a9d7fb1bcf7a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequestBuilder.java @@ -18,8 +18,8 @@ */ public class CloseIndexRequestBuilder extends AcknowledgedRequestBuilder { - public CloseIndexRequestBuilder(ElasticsearchClient client, CloseIndexAction action, String... indices) { - super(client, action, new CloseIndexRequest(indices)); + public CloseIndexRequestBuilder(ElasticsearchClient client, String... 
indices) { + super(client, CloseIndexAction.INSTANCE, new CloseIndexRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index c07734aee557c..7052d4b1356ac 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -28,12 +28,12 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder< CreateIndexResponse, CreateIndexRequestBuilder> { - public CreateIndexRequestBuilder(ElasticsearchClient client, CreateIndexAction action) { - super(client, action, new CreateIndexRequest()); + public CreateIndexRequestBuilder(ElasticsearchClient client) { + super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest()); } - public CreateIndexRequestBuilder(ElasticsearchClient client, CreateIndexAction action, String index) { - super(client, action, new CreateIndexRequest(index)); + public CreateIndexRequestBuilder(ElasticsearchClient client, String index) { + super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest(index)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java index 62be381a40540..a6ae02dddde20 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequestBuilder.java @@ -18,8 +18,8 @@ public class DeleteIndexRequestBuilder extends AcknowledgedRequestBuilder< AcknowledgedResponse, DeleteIndexRequestBuilder> { - public DeleteIndexRequestBuilder(ElasticsearchClient client, DeleteIndexAction action, 
String... indices) { - super(client, action, new DeleteIndexRequest(indices)); + public DeleteIndexRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, DeleteIndexAction.INSTANCE, new DeleteIndexRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java index 75dcced1f35a6..4e474732e3bad 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java @@ -13,8 +13,8 @@ public class FlushRequestBuilder extends BroadcastOperationRequestBuilder { - public FlushRequestBuilder(ElasticsearchClient client, FlushAction action) { - super(client, action, new FlushRequest()); + public FlushRequestBuilder(ElasticsearchClient client) { + super(client, FlushAction.INSTANCE, new FlushRequest()); } public FlushRequestBuilder setForce(boolean force) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java index 4e24eca579be1..835749751f4a6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java @@ -23,8 +23,8 @@ public class ForceMergeRequestBuilder extends BroadcastOperationRequestBuilder< ForceMergeResponse, ForceMergeRequestBuilder> { - public ForceMergeRequestBuilder(ElasticsearchClient client, ForceMergeAction action) { - super(client, action, new ForceMergeRequest()); + public ForceMergeRequestBuilder(ElasticsearchClient client) { + super(client, ForceMergeAction.INSTANCE, new ForceMergeRequest()); } /** diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java index 53e0d8f69fd26..032d64b6acf02 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequestBuilder.java @@ -14,8 +14,8 @@ public class GetIndexRequestBuilder extends ClusterInfoRequestBuilder { - public GetIndexRequestBuilder(ElasticsearchClient client, GetIndexAction action, String... indices) { - super(client, action, new GetIndexRequest().indices(indices)); + public GetIndexRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, GetIndexAction.INSTANCE, new GetIndexRequest().indices(indices)); } public GetIndexRequestBuilder setFeatures(Feature... features) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java index 6492c23e4924d..98e0d626102cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java @@ -16,8 +16,8 @@ /** A helper class to build {@link GetFieldMappingsRequest} objects */ public class GetFieldMappingsRequestBuilder extends ActionRequestBuilder { - public GetFieldMappingsRequestBuilder(ElasticsearchClient client, GetFieldMappingsAction action, String... indices) { - super(client, action, new GetFieldMappingsRequest().indices(indices)); + public GetFieldMappingsRequestBuilder(ElasticsearchClient client, String... 
indices) { + super(client, GetFieldMappingsAction.INSTANCE, new GetFieldMappingsRequest().indices(indices)); } public GetFieldMappingsRequestBuilder setIndices(String... indices) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java index 0bf65b27328d8..1b1f52a7133f1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsRequestBuilder.java @@ -16,7 +16,7 @@ public class GetMappingsRequestBuilder extends ClusterInfoRequestBuilder< GetMappingsResponse, GetMappingsRequestBuilder> { - public GetMappingsRequestBuilder(ElasticsearchClient client, GetMappingsAction action, String... indices) { - super(client, action, new GetMappingsRequest().indices(indices)); + public GetMappingsRequestBuilder(ElasticsearchClient client, String... 
indices) { + super(client, GetMappingsAction.INSTANCE, new GetMappingsRequest().indices(indices)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java index 34b46bb8e090f..5db50f787c477 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java @@ -26,8 +26,8 @@ public class PutMappingRequestBuilder extends AcknowledgedRequestBuilder< AcknowledgedResponse, PutMappingRequestBuilder> { - public PutMappingRequestBuilder(ElasticsearchClient client, PutMappingAction action) { - super(client, action, new PutMappingRequest()); + public PutMappingRequestBuilder(ElasticsearchClient client) { + super(client, PutMappingAction.INSTANCE, new PutMappingRequest()); } public PutMappingRequestBuilder setIndices(String... indices) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequestBuilder.java index 85a31925901d4..8784d7d924d75 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequestBuilder.java @@ -18,8 +18,8 @@ */ public class OpenIndexRequestBuilder extends AcknowledgedRequestBuilder { - public OpenIndexRequestBuilder(ElasticsearchClient client, OpenIndexAction action, String... indices) { - super(client, action, new OpenIndexRequest(indices)); + public OpenIndexRequestBuilder(ElasticsearchClient client, String... 
indices) { + super(client, OpenIndexAction.INSTANCE, new OpenIndexRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java index 9db890428e456..5b16e6889ad22 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java @@ -21,8 +21,8 @@ public class AddIndexBlockRequestBuilder extends AcknowledgedRequestBuilder< AddIndexBlockResponse, AddIndexBlockRequestBuilder> { - public AddIndexBlockRequestBuilder(ElasticsearchClient client, AddIndexBlockAction action, APIBlock block, String... indices) { - super(client, action, new AddIndexBlockRequest(block, indices)); + public AddIndexBlockRequestBuilder(ElasticsearchClient client, APIBlock block, String... 
indices) { + super(client, AddIndexBlockAction.INSTANCE, new AddIndexBlockRequest(block, indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequestBuilder.java index 1a38aff03a824..1d559eca8c186 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryRequestBuilder.java @@ -19,8 +19,8 @@ public class RecoveryRequestBuilder extends BroadcastOperationRequestBuilder { - public RefreshRequestBuilder(ElasticsearchClient client, RefreshAction action) { - super(client, action, new RefreshRequest()); + public RefreshRequestBuilder(ElasticsearchClient client) { + super(client, RefreshAction.INSTANCE, new RefreshRequest()); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java index 14953a01e477a..7f96662719c96 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestBuilder.java @@ -14,8 +14,8 @@ import org.elasticsearch.common.settings.Settings; public class RolloverRequestBuilder extends MasterNodeOperationRequestBuilder { - public RolloverRequestBuilder(ElasticsearchClient client, RolloverAction action) { - super(client, action, new RolloverRequest()); + public RolloverRequestBuilder(ElasticsearchClient client) { + super(client, RolloverAction.INSTANCE, new RolloverRequest()); } public RolloverRequestBuilder setRolloverTarget(String rolloverTarget) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestBuilder.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestBuilder.java index 908564071f39c..f376e6cd05b4f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestBuilder.java @@ -16,7 +16,7 @@ public class IndicesSegmentsRequestBuilder extends BroadcastOperationRequestBuil IndicesSegmentResponse, IndicesSegmentsRequestBuilder> { - public IndicesSegmentsRequestBuilder(ElasticsearchClient client, IndicesSegmentsAction action) { - super(client, action, new IndicesSegmentsRequest()); + public IndicesSegmentsRequestBuilder(ElasticsearchClient client) { + super(client, IndicesSegmentsAction.INSTANCE, new IndicesSegmentsRequest()); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestBuilder.java index f3e560daf3772..166e5cb497ad6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequestBuilder.java @@ -18,8 +18,8 @@ public class GetSettingsRequestBuilder extends MasterNodeReadOperationRequestBui GetSettingsResponse, GetSettingsRequestBuilder> { - public GetSettingsRequestBuilder(ElasticsearchClient client, GetSettingsAction action, String... indices) { - super(client, action, new GetSettingsRequest().indices(indices)); + public GetSettingsRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, GetSettingsAction.INSTANCE, new GetSettingsRequest().indices(indices)); } public GetSettingsRequestBuilder setIndices(String... 
indices) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestBuilder.java index c032d062bfe4f..a48efa31302bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestBuilder.java @@ -25,8 +25,8 @@ public class UpdateSettingsRequestBuilder extends AcknowledgedRequestBuilder< AcknowledgedResponse, UpdateSettingsRequestBuilder> { - public UpdateSettingsRequestBuilder(ElasticsearchClient client, UpdateSettingsAction action, String... indices) { - super(client, action, new UpdateSettingsRequest(indices)); + public UpdateSettingsRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, UpdateSettingsAction.INSTANCE, new UpdateSettingsRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java index a5c6c27964a9e..7165953fa85ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.admin.indices.shards; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -22,8 +21,8 @@ public class IndicesShardStoreRequestBuilder extends MasterNodeReadOperationRequ IndicesShardStoresResponse, IndicesShardStoreRequestBuilder> { - public 
IndicesShardStoreRequestBuilder(ElasticsearchClient client, ActionType action, String... indices) { - super(client, action, new IndicesShardStoresRequest(indices)); + public IndicesShardStoreRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, IndicesShardStoresAction.INSTANCE, new IndicesShardStoresRequest(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java index 172f84e15d700..a4972d1a98e7d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.action.admin.indices.shrink; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; @@ -16,8 +15,8 @@ import org.elasticsearch.common.unit.ByteSizeValue; public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { - public ResizeRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new ResizeRequest()); + public ResizeRequestBuilder(ElasticsearchClient client) { + super(client, ResizeAction.INSTANCE, new ResizeRequest()); } public ResizeRequestBuilder setTargetIndex(CreateIndexRequest request) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java index 708011473343c..40d0c0998b4e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsRequestBuilder.java @@ -25,8 +25,8 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder IndicesStatsResponse, IndicesStatsRequestBuilder> { - public IndicesStatsRequestBuilder(ElasticsearchClient client, IndicesStatsAction action) { - super(client, action, new IndicesStatsRequest()); + public IndicesStatsRequestBuilder(ElasticsearchClient client) { + super(client, IndicesStatsAction.INSTANCE, new IndicesStatsRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequestBuilder.java index a203f810ebf3d..f020474e680d9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequestBuilder.java @@ -16,8 +16,8 @@ public class DeleteIndexTemplateRequestBuilder extends MasterNodeOperationReques AcknowledgedResponse, DeleteIndexTemplateRequestBuilder> { - public DeleteIndexTemplateRequestBuilder(ElasticsearchClient client, DeleteIndexTemplateAction action, String name) { - super(client, action, new DeleteIndexTemplateRequest(name)); + public DeleteIndexTemplateRequestBuilder(ElasticsearchClient client, String name) { + super(client, DeleteIndexTemplateAction.INSTANCE, new DeleteIndexTemplateRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java index 194ac7b77f65c..63ec0ae677e9b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequestBuilder.java @@ -15,7 +15,7 @@ public class GetIndexTemplatesRequestBuilder extends MasterNodeReadOperationRequ GetIndexTemplatesResponse, GetIndexTemplatesRequestBuilder> { - public GetIndexTemplatesRequestBuilder(ElasticsearchClient client, GetIndexTemplatesAction action, String... names) { - super(client, action, new GetIndexTemplatesRequest(names)); + public GetIndexTemplatesRequestBuilder(ElasticsearchClient client, String... names) { + super(client, GetIndexTemplatesAction.INSTANCE, new GetIndexTemplatesRequest(names)); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java index 7dc19ff52ce84..45af625cf2f65 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java @@ -24,8 +24,8 @@ public class PutIndexTemplateRequestBuilder extends MasterNodeOperationRequestBu AcknowledgedResponse, PutIndexTemplateRequestBuilder> { - public PutIndexTemplateRequestBuilder(ElasticsearchClient client, PutIndexTemplateAction action, String name) { - super(client, action, new PutIndexTemplateRequest(name)); + public PutIndexTemplateRequestBuilder(ElasticsearchClient client, String name) { + super(client, PutIndexTemplateAction.INSTANCE, new PutIndexTemplateRequest(name)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java index a0eb40d1749e9..d2a4f25dc6b3d 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequestBuilder.java @@ -17,8 +17,8 @@ public class ValidateQueryRequestBuilder extends BroadcastOperationRequestBuilde ValidateQueryResponse, ValidateQueryRequestBuilder> { - public ValidateQueryRequestBuilder(ElasticsearchClient client, ValidateQueryAction action) { - super(client, action, new ValidateQueryRequest()); + public ValidateQueryRequestBuilder(ElasticsearchClient client) { + super(client, ValidateQueryAction.INSTANCE, new ValidateQueryRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index fe0488962c8f0..2b961b6bc7351 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -29,12 +29,12 @@ */ public class BulkRequestBuilder extends ActionRequestBuilder implements WriteRequestBuilder { - public BulkRequestBuilder(ElasticsearchClient client, BulkAction action, @Nullable String globalIndex) { - super(client, action, new BulkRequest(globalIndex)); + public BulkRequestBuilder(ElasticsearchClient client, @Nullable String globalIndex) { + super(client, BulkAction.INSTANCE, new BulkRequest(globalIndex)); } - public BulkRequestBuilder(ElasticsearchClient client, BulkAction action) { - super(client, action, new BulkRequest()); + public BulkRequestBuilder(ElasticsearchClient client) { + super(client, BulkAction.INSTANCE, new BulkRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java index 29f8e4aba35f8..7107073b6b738 100644 --- 
a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java @@ -21,8 +21,8 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder { - public DeleteRequestBuilder(ElasticsearchClient client, DeleteAction action, @Nullable String index) { - super(client, action, new DeleteRequest(index)); + public DeleteRequestBuilder(ElasticsearchClient client, @Nullable String index) { + super(client, DeleteAction.INSTANCE, new DeleteRequest(index)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java index 9ae05687649ea..f8684cdb4bb0c 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainRequestBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.explain; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.single.shard.SingleShardOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.Strings; @@ -21,8 +20,8 @@ */ public class ExplainRequestBuilder extends SingleShardOperationRequestBuilder { - public ExplainRequestBuilder(ElasticsearchClient client, ActionType action, String index, String id) { - super(client, action, new ExplainRequest().index(index).id(id)); + public ExplainRequestBuilder(ElasticsearchClient client, String index, String id) { + super(client, TransportExplainAction.TYPE, new ExplainRequest().index(index).id(id)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java index 16f94e6c62c46..892a08c837949 100644 --- 
a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestBuilder.java @@ -15,8 +15,8 @@ import java.util.Map; public class FieldCapabilitiesRequestBuilder extends ActionRequestBuilder { - public FieldCapabilitiesRequestBuilder(ElasticsearchClient client, FieldCapabilitiesAction action, String... indices) { - super(client, action, new FieldCapabilitiesRequest().indices(indices)); + public FieldCapabilitiesRequestBuilder(ElasticsearchClient client, String... indices) { + super(client, FieldCapabilitiesAction.INSTANCE, new FieldCapabilitiesRequest().indices(indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java index 6abaeeefb7e1a..f9e748a6e2b22 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetRequestBuilder.java @@ -20,12 +20,12 @@ */ public class GetRequestBuilder extends SingleShardOperationRequestBuilder { - public GetRequestBuilder(ElasticsearchClient client, GetAction action) { - super(client, action, new GetRequest()); + public GetRequestBuilder(ElasticsearchClient client) { + super(client, GetAction.INSTANCE, new GetRequest()); } - public GetRequestBuilder(ElasticsearchClient client, GetAction action, @Nullable String index) { - super(client, action, new GetRequest(index)); + public GetRequestBuilder(ElasticsearchClient client, @Nullable String index) { + super(client, GetAction.INSTANCE, new GetRequest(index)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java index 08371a054c6e4..f872406dbeda2 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/get/MultiGetRequestBuilder.java @@ -16,8 +16,8 @@ */ public class MultiGetRequestBuilder extends ActionRequestBuilder { - public MultiGetRequestBuilder(ElasticsearchClient client, MultiGetAction action) { - super(client, action, new MultiGetRequest()); + public MultiGetRequestBuilder(ElasticsearchClient client) { + super(client, MultiGetAction.INSTANCE, new MultiGetRequest()); } public MultiGetRequestBuilder add(String index, String id) { diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 360b470eb1ab4..4cc26f056245a 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -28,12 +28,12 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder { - public IndexRequestBuilder(ElasticsearchClient client, IndexAction action) { - super(client, action, new IndexRequest()); + public IndexRequestBuilder(ElasticsearchClient client) { + super(client, IndexAction.INSTANCE, new IndexRequest()); } - public IndexRequestBuilder(ElasticsearchClient client, IndexAction action, @Nullable String index) { - super(client, action, new IndexRequest(index)); + public IndexRequestBuilder(ElasticsearchClient client, @Nullable String index) { + super(client, IndexAction.INSTANCE, new IndexRequest(index)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java index fdc0e7ba42d92..e953bf7d4ec6b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java @@ -14,8 +14,8 @@ public class DeletePipelineRequestBuilder extends 
ActionRequestBuilder { - public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action, String id) { - super(client, action, new DeletePipelineRequest(id)); + public DeletePipelineRequestBuilder(ElasticsearchClient client, String id) { + super(client, DeletePipelineAction.INSTANCE, new DeletePipelineRequest(id)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java index 48d5fa0f0968a..ca873c5aa3843 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java @@ -16,8 +16,8 @@ public class GetPipelineRequestBuilder extends MasterNodeReadOperationRequestBui GetPipelineResponse, GetPipelineRequestBuilder> { - public GetPipelineRequestBuilder(ElasticsearchClient client, GetPipelineAction action, String[] ids) { - super(client, action, new GetPipelineRequest(ids)); + public GetPipelineRequestBuilder(ElasticsearchClient client, String[] ids) { + super(client, GetPipelineAction.INSTANCE, new GetPipelineRequest(ids)); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index f7a90b94d37ca..13fac13ef5437 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -16,13 +16,7 @@ public class PutPipelineRequestBuilder extends ActionRequestBuilder { - public PutPipelineRequestBuilder( - ElasticsearchClient client, - PutPipelineAction action, - String id, - BytesReference source, - XContentType xContentType - ) { - super(client, action, new PutPipelineRequest(id, source, xContentType)); + public 
PutPipelineRequestBuilder(ElasticsearchClient client, String id, BytesReference source, XContentType xContentType) { + super(client, PutPipelineAction.INSTANCE, new PutPipelineRequest(id, source, xContentType)); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java index 93f5ab9b78913..0c9995c6edf3c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequestBuilder.java @@ -18,13 +18,8 @@ public class SimulatePipelineRequestBuilder extends ActionRequestBuilder { - public ClearScrollRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new ClearScrollRequest()); + public ClearScrollRequestBuilder(ElasticsearchClient client) { + super(client, TransportClearScrollAction.TYPE, new ClearScrollRequest()); } public ClearScrollRequestBuilder setScrollIds(List cursorIds) { diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java index 20888d652c8ac..67c9a33a2e801 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequestBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -18,8 +17,8 @@ */ public class MultiSearchRequestBuilder extends ActionRequestBuilder { - public MultiSearchRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new MultiSearchRequest()); + public 
MultiSearchRequestBuilder(ElasticsearchClient client) { + super(client, TransportMultiSearchAction.TYPE, new MultiSearchRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 4588c707c3beb..60f92cacba963 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.Nullable; @@ -42,8 +41,8 @@ */ public class SearchRequestBuilder extends ActionRequestBuilder { - public SearchRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new SearchRequest()); + public SearchRequestBuilder(ElasticsearchClient client) { + super(client, TransportSearchAction.TYPE, new SearchRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollRequestBuilder.java index 4de27b8430417..d5c55280b1917 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollRequestBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.Scroll; @@ -19,12 +18,12 @@ */ public class SearchScrollRequestBuilder extends ActionRequestBuilder { - public 
SearchScrollRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new SearchScrollRequest()); + public SearchScrollRequestBuilder(ElasticsearchClient client) { + super(client, TransportSearchScrollAction.TYPE, new SearchScrollRequest()); } - public SearchScrollRequestBuilder(ElasticsearchClient client, ActionType action, String scrollId) { - super(client, action, new SearchScrollRequest(scrollId)); + public SearchScrollRequestBuilder(ElasticsearchClient client, String scrollId) { + super(client, TransportSearchScrollAction.TYPE, new SearchScrollRequest(scrollId)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequestBuilder.java index 70f60740a966e..aa3346851917e 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequestBuilder.java @@ -13,8 +13,8 @@ public class MultiTermVectorsRequestBuilder extends ActionRequestBuilder { - public MultiTermVectorsRequestBuilder(ElasticsearchClient client, MultiTermVectorsAction action) { - super(client, action, new MultiTermVectorsRequest()); + public MultiTermVectorsRequestBuilder(ElasticsearchClient client) { + super(client, MultiTermVectorsAction.INSTANCE, new MultiTermVectorsRequest()); } public MultiTermVectorsRequestBuilder add(String index, Iterable ids) { diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java index abf17021898c5..308f87e07d1e2 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequestBuilder.java @@ -24,8 +24,8 @@ */ public class 
TermVectorsRequestBuilder extends ActionRequestBuilder { - public TermVectorsRequestBuilder(ElasticsearchClient client, TermVectorsAction action) { - super(client, action, new TermVectorsRequest()); + public TermVectorsRequestBuilder(ElasticsearchClient client) { + super(client, TermVectorsAction.INSTANCE, new TermVectorsRequest()); } /** @@ -33,8 +33,8 @@ public TermVectorsRequestBuilder(ElasticsearchClient client, TermVectorsAction a * from the provided index. Use {@code index}, {@code type} and * {@code id} to specify the document to load. */ - public TermVectorsRequestBuilder(ElasticsearchClient client, TermVectorsAction action, String index, String id) { - super(client, action, new TermVectorsRequest(index, id)); + public TermVectorsRequestBuilder(ElasticsearchClient client, String index, String id) { + super(client, TermVectorsAction.INSTANCE, new TermVectorsRequest(index, id)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 482ebd3489916..6b7d79abc51a3 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -26,12 +26,12 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder { - public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action) { - super(client, action, new UpdateRequest()); + public UpdateRequestBuilder(ElasticsearchClient client) { + super(client, UpdateAction.INSTANCE, new UpdateRequest()); } - public UpdateRequestBuilder(ElasticsearchClient client, UpdateAction action, String index, String id) { - super(client, action, new UpdateRequest(index, id)); + public UpdateRequestBuilder(ElasticsearchClient client, String index, String id) { + super(client, UpdateAction.INSTANCE, new UpdateRequest(index, id)); } /** diff --git 
a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 82d0f2fb85847..d5fa2f7796252 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -32,7 +32,6 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; -import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsAction; import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; @@ -404,12 +403,12 @@ public void index(final IndexRequest request, final ActionListener l @Override public BulkRequestBuilder prepareBulk() { - return new BulkRequestBuilder(this, BulkAction.INSTANCE); + return new BulkRequestBuilder(this); } @Override public BulkRequestBuilder prepareBulk(@Nullable String globalIndex) { - return new BulkRequestBuilder(this, BulkAction.INSTANCE, globalIndex); + return new BulkRequestBuilder(this, globalIndex); } @Override @@ -484,7 +483,7 @@ public void get(final GetRequest request, final ActionListener list @Override public GetRequestBuilder prepareGet() { - return new GetRequestBuilder(this, GetAction.INSTANCE, null); + return new GetRequestBuilder(this, null); } @Override @@ -504,7 +503,7 @@ public void multiGet(final MultiGetRequest request, final ActionListener clearScroll(ClearScrollRequest request) @Override public ClearScrollRequestBuilder prepareClearScroll() { - return new ClearScrollRequestBuilder(this, TransportClearScrollAction.TYPE); + 
return new ClearScrollRequestBuilder(this); } @Override @@ -629,7 +628,7 @@ public ActionFuture fieldCaps(FieldCapabilitiesReques @Override public FieldCapabilitiesRequestBuilder prepareFieldCaps(String... indices) { - return new FieldCapabilitiesRequestBuilder(this, FieldCapabilitiesAction.INSTANCE, indices); + return new FieldCapabilitiesRequestBuilder(this, indices); } static class Admin implements AdminClient { @@ -695,7 +694,7 @@ public void health(final ClusterHealthRequest request, final ActionListener putPipeline(PutPipelineRequest request @Override public PutPipelineRequestBuilder preparePutPipeline(String id, BytesReference source, XContentType xContentType) { - return new PutPipelineRequestBuilder(this, PutPipelineAction.INSTANCE, id, source, xContentType); + return new PutPipelineRequestBuilder(this, id, source, xContentType); } @Override @@ -1028,7 +1027,7 @@ public ActionFuture deletePipeline(DeletePipelineRequest r @Override public DeletePipelineRequestBuilder prepareDeletePipeline(String id) { - return new DeletePipelineRequestBuilder(this, DeletePipelineAction.INSTANCE, id); + return new DeletePipelineRequestBuilder(this, id); } @Override @@ -1038,7 +1037,7 @@ public void getPipeline(GetPipelineRequest request, ActionListener simulatePipeline(SimulatePipelineR @Override public SimulatePipelineRequestBuilder prepareSimulatePipeline(BytesReference source, XContentType xContentType) { - return new SimulatePipelineRequestBuilder(this, SimulatePipelineAction.INSTANCE, source, xContentType); + return new SimulatePipelineRequestBuilder(this, source, xContentType); } @Override @@ -1108,12 +1107,12 @@ public void deleteDanglingIndex(DeleteDanglingIndexRequest request, ActionListen @Override public GetStoredScriptRequestBuilder prepareGetStoredScript(String id) { - return new GetStoredScriptRequestBuilder(this, GetStoredScriptAction.INSTANCE).setId(id); + return new GetStoredScriptRequestBuilder(this).setId(id); } @Override public 
PutStoredScriptRequestBuilder preparePutStoredScript() { - return new PutStoredScriptRequestBuilder(this, PutStoredScriptAction.INSTANCE); + return new PutStoredScriptRequestBuilder(this); } @Override @@ -1129,7 +1128,7 @@ public void deleteStoredScript(DeleteStoredScriptRequest request, ActionListener @Override public DeleteStoredScriptRequestBuilder prepareDeleteStoredScript(String id) { - return new DeleteStoredScriptRequestBuilder(client, DeleteStoredScriptAction.INSTANCE).setId(id); + return new DeleteStoredScriptRequestBuilder(client).setId(id); } } @@ -1175,7 +1174,7 @@ public void aliases(final IndicesAliasesRequest request, final ActionListener l @Override public GetIndexRequestBuilder prepareGetIndex() { - return new GetIndexRequestBuilder(this, GetIndexAction.INSTANCE); + return new GetIndexRequestBuilder(this); } @Override @@ -1220,7 +1219,7 @@ public void clearCache(final ClearIndicesCacheRequest request, final ActionListe @Override public ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) { - return new ClearIndicesCacheRequestBuilder(this, ClearIndicesCacheAction.INSTANCE).setIndices(indices); + return new ClearIndicesCacheRequestBuilder(this).setIndices(indices); } @Override @@ -1235,7 +1234,7 @@ public void create(final CreateIndexRequest request, final ActionListener getMappings(GetMappingsRequest request) @Override public GetFieldMappingsRequestBuilder prepareGetFieldMappings(String... 
indices) { - return new GetFieldMappingsRequestBuilder(this, GetFieldMappingsAction.INSTANCE, indices); + return new GetFieldMappingsRequestBuilder(this, indices); } @Override @@ -1350,7 +1349,7 @@ public void putMapping(final PutMappingRequest request, final ActionListener li @Override public RolloverRequestBuilder prepareRolloverIndex(String alias) { - return new RolloverRequestBuilder(this, RolloverAction.INSTANCE).setRolloverTarget(alias); + return new RolloverRequestBuilder(this).setRolloverTarget(alias); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java index 7f6465e9bce8a..49d3c660a4b68 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -8,19 +8,17 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.ElasticsearchClient; public class DeleteByQueryRequestBuilder extends AbstractBulkByScrollRequestBuilder { - public DeleteByQueryRequestBuilder(ElasticsearchClient client, ActionType action) { - this(client, action, new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + public DeleteByQueryRequestBuilder(ElasticsearchClient client) { + this(client, new SearchRequestBuilder(client)); } - private DeleteByQueryRequestBuilder(ElasticsearchClient client, ActionType action, SearchRequestBuilder search) { - super(client, action, search, new DeleteByQueryRequest(search.request())); + private DeleteByQueryRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search) { + super(client, DeleteByQueryAction.INSTANCE, search, new DeleteByQueryRequest(search.request())); } 
@Override diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index e79d06ceba0b5..88a851bee15e0 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -8,32 +8,19 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.ElasticsearchClient; public class ReindexRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { private final IndexRequestBuilder destination; - public ReindexRequestBuilder(ElasticsearchClient client, ActionType action) { - this( - client, - action, - new SearchRequestBuilder(client, TransportSearchAction.TYPE), - new IndexRequestBuilder(client, IndexAction.INSTANCE) - ); + public ReindexRequestBuilder(ElasticsearchClient client) { + this(client, new SearchRequestBuilder(client), new IndexRequestBuilder(client)); } - private ReindexRequestBuilder( - ElasticsearchClient client, - ActionType action, - SearchRequestBuilder search, - IndexRequestBuilder destination - ) { - super(client, action, search, new ReindexRequest(search.request(), destination.request())); + private ReindexRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search, IndexRequestBuilder destination) { + super(client, ReindexAction.INSTANCE, search, new ReindexRequest(search.request(), destination.request())); this.destination = destination; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java 
b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java index 2dfd786378365..b63ebdf1def86 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java @@ -8,21 +8,19 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.ElasticsearchClient; public class UpdateByQueryRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder< UpdateByQueryRequest, UpdateByQueryRequestBuilder> { - public UpdateByQueryRequestBuilder(ElasticsearchClient client, ActionType action) { - this(client, action, new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + public UpdateByQueryRequestBuilder(ElasticsearchClient client) { + this(client, new SearchRequestBuilder(client)); } - private UpdateByQueryRequestBuilder(ElasticsearchClient client, ActionType action, SearchRequestBuilder search) { - super(client, action, search, new UpdateByQueryRequest(search.request())); + private UpdateByQueryRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search) { + super(client, UpdateByQueryAction.INSTANCE, search, new UpdateByQueryRequest(search.request())); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTests.java index 9941f84da7b9a..34231356105e8 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilderTests.java @@ -51,7 +51,7 @@ public void tearDown() throws Exception { * test setting the 
source with available setters */ public void testSetSource() throws IOException { - CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> { builder.setSource(Strings.format("{ \"%s\": \"%s\" }", KEY, VALUE), XContentType.JSON); @@ -92,7 +92,7 @@ public void testSetSource() throws IOException { * test setting the settings with available setters */ public void testSetSettings() throws IOException { - CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient, CreateIndexAction.INSTANCE); + CreateIndexRequestBuilder builder = new CreateIndexRequestBuilder(this.testClient); builder.setSettings(Settings.builder().put(KEY, VALUE)); assertEquals(VALUE, builder.request().settings().get(KEY)); diff --git a/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java b/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java index 0b582e75c7a68..d2db4b7fa6984 100644 --- a/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/TransportMultiGetActionTests.java @@ -188,7 +188,7 @@ public static void afterClass() { public void testTransportMultiGetAction() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE); + final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client); request.add(new MultiGetRequest.Item("index1", "1")); request.add(new MultiGetRequest.Item("index1", "2")); @@ -221,7 +221,7 @@ protected void executeShardAction( public void testTransportMultiGetAction_withMissingRouting() { final Task task = createTask(); final 
NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE); + final MultiGetRequestBuilder request = new MultiGetRequestBuilder(client); request.add(new MultiGetRequest.Item("index2", "1").routing("1")); request.add(new MultiGetRequest.Item("index2", "2")); diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java index 2f66b9d3b70f8..e2f67d9387ff5 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java @@ -47,7 +47,7 @@ public void tearDown() throws Exception { * test setting the source for the request with different available setters */ public void testSetSource() throws Exception { - IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient, IndexAction.INSTANCE); + IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient); Map source = new HashMap<>(); source.put("SomeKey", "SomeValue"); indexRequestBuilder.setSource(source); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java index bb2b6f7903a6e..15595d825408a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java @@ -20,7 +20,7 @@ public class SearchRequestBuilderTests extends ESTestCase { private SearchRequestBuilder makeBuilder() { ElasticsearchClient client = Mockito.mock(ElasticsearchClient.class); - return new SearchRequestBuilder(client, TransportSearchAction.TYPE); + return new SearchRequestBuilder(client); } public void testEmptySourceToString() { 
diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java index f2614f69cfe8b..b6bc524b25e8f 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -190,7 +190,7 @@ public static void afterClass() { public void testTransportMultiGetAction() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE); + final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client); request.add(new TermVectorsRequest("index1", "1")); request.add(new TermVectorsRequest("index2", "2")); @@ -222,7 +222,7 @@ protected void executeShardAction( public void testTransportMultiGetAction_withMissingRouting() { final Task task = createTask(); final NodeClient client = new NodeClient(Settings.EMPTY, threadPool); - final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE); + final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client); request.add(new TermVectorsRequest("index2", "1").routing("1")); request.add(new TermVectorsRequest("index2", "2")); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java index 5271289e37b7f..d23e909feee05 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnSearchRequestParserTests.java @@ -10,7 +10,6 @@ import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilder; @@ -224,7 +223,7 @@ private SearchRequestBuilder parseSearchRequest(XContentBuilder builder, Map { public DeleteLicenseRequestBuilder(ElasticsearchClient client) { - this(client, DeleteLicenseAction.INSTANCE); - } - - /** - * Creates new get licenses request builder - * - * @param client elasticsearch client - */ - public DeleteLicenseRequestBuilder(ElasticsearchClient client, DeleteLicenseAction action) { - super(client, action, new DeleteLicenseRequest()); + super(client, DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java index 1afa2783ecb09..532e63e24d09e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseRequestBuilder.java @@ -17,17 +17,13 @@ */ public class PutLicenseRequestBuilder extends AcknowledgedRequestBuilder { - public PutLicenseRequestBuilder(ElasticsearchClient client) { - this(client, PutLicenseAction.INSTANCE); - } - /** * Constructs register license request * * @param client elasticsearch client */ - public PutLicenseRequestBuilder(ElasticsearchClient client, PutLicenseAction action) { - super(client, action, new PutLicenseRequest()); + public PutLicenseRequestBuilder(ElasticsearchClient client) { + super(client, PutLicenseAction.INSTANCE, new PutLicenseRequest()); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java index b20e1574e243c..d7786cba69ef8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java @@ -16,11 +16,7 @@ public class XPackInfoRequestBuilder extends ActionRequestBuilder { public XPackInfoRequestBuilder(ElasticsearchClient client) { - this(client, XPackInfoAction.INSTANCE); - } - - public XPackInfoRequestBuilder(ElasticsearchClient client, XPackInfoAction action) { - super(client, action, new XPackInfoRequest()); + super(client, XPackInfoAction.INSTANCE, new XPackInfoRequest()); } public XPackInfoRequestBuilder setVerbose(boolean verbose) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java index 8786fd7e1486c..b8322d4fe0779 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreRequestBuilder.java @@ -26,8 +26,8 @@ */ public class GraphExploreRequestBuilder extends ActionRequestBuilder { - public GraphExploreRequestBuilder(ElasticsearchClient client, GraphExploreAction action) { - super(client, action, new GraphExploreRequest()); + public GraphExploreRequestBuilder(ElasticsearchClient client) { + super(client, GraphExploreAction.INSTANCE, new GraphExploreRequest()); } public GraphExploreRequestBuilder setIndices(String... 
indices) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java index 2ce1778941270..d07187b967929 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestBuilder.java @@ -25,11 +25,7 @@ public class PutRoleRequestBuilder extends ActionRequestBuilder { public PutRoleRequestBuilder(ElasticsearchClient client) { - this(client, PutRoleAction.INSTANCE); - } - - public PutRoleRequestBuilder(ElasticsearchClient client, PutRoleAction action) { - super(client, action, new PutRoleRequest()); + super(client, PutRoleAction.INSTANCE, new PutRoleRequest()); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java index 8b6f40a475baf..cf1aa526e0cea 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; @@ -17,8 +16,8 @@ */ public final class CreateTokenRequestBuilder extends ActionRequestBuilder { - public CreateTokenRequestBuilder(ElasticsearchClient client, ActionType action) { - super(client, action, new 
CreateTokenRequest()); + public CreateTokenRequestBuilder(ElasticsearchClient client) { + super(client, CreateTokenAction.INSTANCE, new CreateTokenRequest()); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserRequestBuilder.java index 23dd0bfc1e454..2f87eb63ddc6b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserRequestBuilder.java @@ -15,11 +15,7 @@ public class DeleteUserRequestBuilder extends ActionRequestBuilder { public DeleteUserRequestBuilder(ElasticsearchClient client) { - this(client, DeleteUserAction.INSTANCE); - } - - public DeleteUserRequestBuilder(ElasticsearchClient client, DeleteUserAction action) { - super(client, action, new DeleteUserRequest()); + super(client, DeleteUserAction.INSTANCE, new DeleteUserRequest()); } public DeleteUserRequestBuilder username(String username) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersRequestBuilder.java index f2e483f8590aa..a100d3c03a3ab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersRequestBuilder.java @@ -12,11 +12,7 @@ public class GetUsersRequestBuilder extends ActionRequestBuilder { public GetUsersRequestBuilder(ElasticsearchClient client) { - this(client, GetUsersAction.INSTANCE); - } - - public GetUsersRequestBuilder(ElasticsearchClient client, GetUsersAction action) { - super(client, action, new GetUsersRequest()); + 
super(client, GetUsersAction.INSTANCE, new GetUsersRequest()); } public GetUsersRequestBuilder usernames(String... usernames) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java index 60a6d04f2c50d..ef33decc9f79c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequestBuilder.java @@ -35,11 +35,7 @@ public class PutUserRequestBuilder extends ActionRequestBuilder { public PutUserRequestBuilder(ElasticsearchClient client) { - this(client, PutUserAction.INSTANCE); - } - - public PutUserRequestBuilder(ElasticsearchClient client, PutUserAction action) { - super(client, action, new PutUserRequest()); + super(client, PutUserAction.INSTANCE, new PutUserRequest()); } public PutUserRequestBuilder username(String username) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index beade9fe50d3a..985bab5a0d1d9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -87,13 +87,8 @@ public void writeTo(StreamOutput out) throws IOException { } public static class RequestBuilder extends ActionRequestBuilder { - - public RequestBuilder(ElasticsearchClient client, GetCertificateInfoAction action) { - super(client, action, new Request()); - } - public RequestBuilder(ElasticsearchClient client) { - this(client, GetCertificateInfoAction.INSTANCE); + super(client, GetCertificateInfoAction.INSTANCE, new 
Request()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java index 5241ca3b888dc..38e59a2e34df8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/rest/RestGetCertificateInfoAction.java @@ -42,13 +42,11 @@ public String getName() { @Override protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { - return channel -> new GetCertificateInfoAction.RequestBuilder(client, GetCertificateInfoAction.INSTANCE).execute( - new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { - return new RestResponse(RestStatus.OK, response.toXContent(builder, request)); - } + return channel -> new GetCertificateInfoAction.RequestBuilder(client).execute(new RestBuilderListener<>(channel) { + @Override + public RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { + return new RestResponse(RestStatus.OK, response.toXContent(builder, request)); } - ); + }); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java index effc92efe0f8a..f2293ac6bd9a1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java @@ -76,8 +76,7 @@ public void testPutLicense() throws Exception { License signedLicense = generateSignedLicense(TimeValue.timeValueMinutes(2)); // put license - PutLicenseRequestBuilder putLicenseRequestBuilder = new 
PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(signedLicense) + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense(signedLicense) .setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); @@ -93,9 +92,10 @@ public void testPutLicenseFromString() throws Exception { String licenseString = TestUtils.dumpLicense(signedLicense); // put license source - PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON) - .setAcknowledge(true); + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense( + new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ).setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID)); @@ -115,7 +115,7 @@ public void testPutInvalidLicense() throws Exception { .validate() .build(); - PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE); + PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(clusterAdmin()); builder.setLicense(tamperedLicense); // try to put license (should be invalid) @@ -130,7 +130,7 @@ public void testPutInvalidLicense() throws Exception { public void testPutBasicLicenseIsInvalid() throws Exception { License signedLicense = generateSignedLicense("basic", License.VERSION_CURRENT, -1, TimeValue.timeValueMinutes(2)); - PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE); + PutLicenseRequestBuilder builder = new 
PutLicenseRequestBuilder(clusterAdmin()); builder.setLicense(signedLicense); // try to put license (should be invalid) @@ -144,7 +144,7 @@ public void testPutBasicLicenseIsInvalid() throws Exception { public void testPutExpiredLicense() throws Exception { License expiredLicense = generateExpiredNonBasicLicense(); - PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE); + PutLicenseRequestBuilder builder = new PutLicenseRequestBuilder(clusterAdmin()); builder.setLicense(expiredLicense); PutLicenseResponse putLicenseResponse = builder.get(); assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.EXPIRED)); @@ -155,8 +155,7 @@ public void testPutExpiredLicense() throws Exception { public void testPutLicensesSimple() throws Exception { License goldSignedLicense = generateSignedLicense("gold", TimeValue.timeValueMinutes(5)); - PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(goldSignedLicense) + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense(goldSignedLicense) .setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.status(), equalTo(LicensesStatus.VALID)); @@ -174,8 +173,7 @@ public void testPutLicensesSimple() throws Exception { public void testRemoveLicensesSimple() throws Exception { License goldLicense = generateSignedLicense("gold", TimeValue.timeValueMinutes(5)); - PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(goldLicense) + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense(goldLicense) .setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); 
@@ -183,10 +181,7 @@ public void testRemoveLicensesSimple() throws Exception { GetLicenseResponse getLicenseResponse = new GetLicenseRequestBuilder(clusterAdmin(), GetLicenseAction.INSTANCE).get(); assertThat(getLicenseResponse.license(), equalTo(goldLicense)); // delete all licenses - DeleteLicenseRequestBuilder deleteLicenseRequestBuilder = new DeleteLicenseRequestBuilder( - clusterAdmin(), - DeleteLicenseAction.INSTANCE - ); + DeleteLicenseRequestBuilder deleteLicenseRequestBuilder = new DeleteLicenseRequestBuilder(clusterAdmin()); AcknowledgedResponse deleteLicenseResponse = deleteLicenseRequestBuilder.get(); assertThat(deleteLicenseResponse.isAcknowledged(), equalTo(true)); // get licenses (expected no licenses) @@ -208,8 +203,7 @@ public void testLicenseIsRejectWhenStartDateLaterThanNow() throws Exception { .maxNodes(5); License license = TestUtils.generateSignedLicense(builder); - PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(license) + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense(license) .setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); @@ -230,8 +224,7 @@ public void testLicenseIsAcceptedWhenStartDateBeforeThanNow() throws Exception { .maxNodes(5); License license = TestUtils.generateSignedLicense(builder); - PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin(), PutLicenseAction.INSTANCE) - .setLicense(license) + PutLicenseRequestBuilder putLicenseRequestBuilder = new PutLicenseRequestBuilder(clusterAdmin()).setLicense(license) .setAcknowledge(true); PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get(); assertThat(putLicenseResponse.isAcknowledged(), equalTo(true)); diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index 9d959951383dd..cbe0ba99e57ce 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -10,11 +10,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; @@ -95,10 +93,10 @@ public void setUpMocks() { indicesAdminClient = mock(IndicesAdminClient.class); when(indicesAdminClient.prepareCreate(FIRST_CONCRETE_INDEX)).thenReturn( - new CreateIndexRequestBuilder(client, CreateIndexAction.INSTANCE, FIRST_CONCRETE_INDEX) + new CreateIndexRequestBuilder(client, FIRST_CONCRETE_INDEX) ); doAnswer(withResponse(new CreateIndexResponse(true, true, FIRST_CONCRETE_INDEX))).when(indicesAdminClient).create(any(), any()); - when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client, IndicesAliasesAction.INSTANCE)); + when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client)); 
doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).aliases(any(), any()); doAnswer(withResponse(AcknowledgedResponse.TRUE)).when(indicesAdminClient).putTemplate(any(), any()); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java index f6369be442837..af1f876906237 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -211,7 +210,7 @@ public void testCancellationDuringSearch() throws InterruptedException { // Emulation of search cancellation ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); - when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(indices)); + when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client).setIndices(indices)); doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") SearchRequest request = (SearchRequest) invocation.getArguments()[1]; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 2211512144f99..63fbb7176558c 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -790,10 +790,7 @@ public void testESFilter() throws Exception { long to = randomBoolean() ? Long.MAX_VALUE : randomLongBetween(from, from + 1000); QueryBuilder filter = new RangeQueryBuilder("val").from(from, true).to(to, true); try ( - EsqlQueryResponse results = new EsqlQueryRequestBuilder(client(), EsqlQueryAction.INSTANCE).query(command) - .filter(filter) - .pragmas(randomPragmas()) - .get() + EsqlQueryResponse results = new EsqlQueryRequestBuilder(client()).query(command).filter(filter).pragmas(randomPragmas()).get() ) { logger.info(results); OptionalDouble avg = docs.values().stream().filter(v -> from <= v && v <= to).mapToLong(n -> n).average(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index d85d600b4a259..2d1d01e42b509 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -266,9 +266,7 @@ private ActionFuture startEsql() { .put("status_interval", "0ms") .build() ); - return new EsqlQueryRequestBuilder(client(), EsqlQueryAction.INSTANCE).query("from test | stats sum(pause_me)") - .pragmas(pragmas) - .execute(); + return new EsqlQueryRequestBuilder(client()).query("from test | stats sum(pause_me)").pragmas(pragmas).execute(); } private void cancelTask(TaskId taskId) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java index 1d23fcbf8e05b..be3aeec190ded 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java @@ -14,12 +14,8 @@ public class EsqlQueryRequestBuilder extends ActionRequestBuilder { - public EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryAction action, EsqlQueryRequest request) { - super(client, action, request); - } - - public EsqlQueryRequestBuilder(ElasticsearchClient client, EsqlQueryAction action) { - this(client, action, new EsqlQueryRequest()); + public EsqlQueryRequestBuilder(ElasticsearchClient client) { + super(client, EsqlQueryAction.INSTANCE, new EsqlQueryRequest()); } public EsqlQueryRequestBuilder query(String query) { diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 25c22672cf81c..4be64fd852dc0 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; -import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.graph.action.GraphExploreRequestBuilder; import org.elasticsearch.xpack.graph.Graph; @@ -117,7 +116,7 @@ protected Collection> getPlugins() { } public void testSignificanceQueryCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", 
"beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles @@ -145,7 +144,7 @@ protected Settings nodeSettings() { public void testTargetedQueryCrawl() { // Tests use of a client-provided query to steer exploration - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles // 70s friends of beatles @@ -161,7 +160,7 @@ public void testTargetedQueryCrawl() { } public void testLargeNumberTermsStartCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); Hop hop1 = grb.createNextHop(null); VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); @@ -179,7 +178,7 @@ public void testLargeNumberTermsStartCrawl() { } public void testTargetedQueryCrawlDepth2() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles // 00s friends of beatles @@ -196,7 +195,7 @@ public void testTargetedQueryCrawlDepth2() { } public void testPopularityQueryCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + 
GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); // Turning off the significance feature means we reward popularity grb.useSignificance(false); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); @@ -213,7 +212,7 @@ public void testPopularityQueryCrawl() { } public void testTimedoutQueryCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); grb.setTimeout(TimeValue.timeValueMillis(400)); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles @@ -237,7 +236,7 @@ public void testTimedoutQueryCrawl() { } public void testNonDiversifiedCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); Hop hop1 = grb.createNextHop(QueryBuilders.termsQuery("people", "dave", "other")); hop1.addVertexRequest("people").size(10).minDocCount(1); @@ -249,7 +248,7 @@ public void testNonDiversifiedCrawl() { } public void testDiversifiedCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); grb.sampleDiversityField("description").maxDocsPerDiversityValue(1); Hop hop1 = grb.createNextHop(QueryBuilders.termsQuery("people", "dave", "other")); @@ -262,7 +261,7 @@ public void testDiversifiedCrawl() { } public void testInvalidDiversifiedCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = 
new GraphExploreRequestBuilder(client()).setIndices("test"); grb.sampleDiversityField("description").maxDocsPerDiversityValue(1); Hop hop1 = grb.createNextHop(QueryBuilders.termsQuery("people", "roy", "other")); @@ -283,10 +282,7 @@ public void testInvalidDiversifiedCrawl() { } public void testMappedAndUnmappedQueryCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices( - "test", - "idx_unmapped" - ); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test", "idx_unmapped"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); // members of beatles grb.createNextHop(null).addVertexRequest("people").size(100).minDocCount(1); // friends of members of beatles @@ -301,7 +297,7 @@ public void testMappedAndUnmappedQueryCrawl() { } public void testUnmappedQueryCrawl() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("idx_unmapped"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("idx_unmapped"); Hop hop1 = grb.createNextHop(QueryBuilders.termQuery("description", "beatles")); hop1.addVertexRequest("people").size(10).minDocCount(1); @@ -312,7 +308,7 @@ public void testUnmappedQueryCrawl() { } public void testRequestValidation() { - GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client(), GraphExploreAction.INSTANCE).setIndices("test"); + GraphExploreRequestBuilder grb = new GraphExploreRequestBuilder(client()).setIndices("test"); try { grb.get(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 410838e2b0263..3b3134fe3d92e 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -231,7 +230,7 @@ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() { private Client mockBulkClient() { var client = mockClient(); - when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); return client; } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 209f12c2e90ce..858c5ba946f78 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -311,9 +311,11 @@ protected static List fetchAllAuditMessages(String jobId) throws Excepti RefreshResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); assertThat(refreshResponse.getStatus().getStatus(), anyOf(equalTo(200), equalTo(201))); - SearchRequest searchRequest = new SearchRequestBuilder(client(), TransportSearchAction.TYPE).setIndices( - NotificationsIndex.NOTIFICATIONS_INDEX - 
).addSort("timestamp", SortOrder.ASC).setQuery(QueryBuilders.termQuery("job_id", jobId)).setSize(100).request(); + SearchRequest searchRequest = new SearchRequestBuilder(client()).setIndices(NotificationsIndex.NOTIFICATIONS_INDEX) + .addSort("timestamp", SortOrder.ASC) + .setQuery(QueryBuilders.termQuery("job_id", jobId)) + .setSize(100) + .request(); List messages = new ArrayList<>(); assertResponse( client().execute(TransportSearchAction.TYPE, searchRequest), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java index 42766f6ebf12b..34ea3a1fad04e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; @@ -30,7 +29,7 @@ class AggregationDataExtractor extends AbstractAggregationDataExtractor new SearchRequestBuilder(client, TransportSearchAction.TYPE).setSource(searchSourceBuilder) + return (searchSourceBuilder) -> new SearchRequestBuilder(client).setSource(searchSourceBuilder) .setIndicesOptions(indicesOptions) .setAllowPartialSearchResults(false) .setIndices(indices); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index 03dda0a8201b7..d5259c2454011 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; @@ -279,7 +278,7 @@ private SearchSourceBuilder rangeSearchBuilder() { } private SearchRequestBuilder rangeSearchRequest() { - return new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(context.indices) + return new SearchRequestBuilder(client).setIndices(context.indices) .setIndicesOptions(context.indicesOptions) .setSource(rangeSearchBuilder()) .setAllowPartialSearchResults(false) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 8757e1afd8123..c721462697b65 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -15,8 +15,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequestBuilder; import org.elasticsearch.action.search.TransportClearScrollAction; -import org.elasticsearch.action.search.TransportSearchAction; 
-import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.TimeValue; @@ -154,7 +152,7 @@ private SearchRequestBuilder buildSearchRequest(long start) { .query(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.extractedFields.timeField(), start, context.end)) .runtimeMappings(context.runtimeMappings); - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, TransportSearchAction.TYPE).setScroll(SCROLL_TIMEOUT) + SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client).setScroll(SCROLL_TIMEOUT) .setIndices(context.indices) .setIndicesOptions(context.indicesOptions) .setAllowPartialSearchResults(false) @@ -250,9 +248,7 @@ protected SearchResponse executeSearchScrollRequest(String scrollId) { context.headers, ClientHelper.ML_ORIGIN, client, - () -> new SearchScrollRequestBuilder(client, TransportSearchScrollAction.TYPE).setScroll(SCROLL_TIMEOUT) - .setScrollId(scrollId) - .get() + () -> new SearchScrollRequestBuilder(client).setScroll(SCROLL_TIMEOUT).setScrollId(scrollId).get() ); try { checkForSkippedClusters(searchResponse); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 7ea6fdf55ca0a..ab6ee250df5c6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -127,7 +127,7 @@ public Optional> next() throws IOException { */ public void preview(ActionListener> listener) { - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, TransportSearchAction.TYPE) + SearchRequestBuilder searchRequestBuilder 
= new SearchRequestBuilder(client) // This ensures the search throws if there are failures and the scroll context gets cleared automatically .setAllowPartialSearchResults(false) .setIndices(context.indices) @@ -203,7 +203,7 @@ private SearchRequestBuilder buildSearchRequest() { LOGGER.trace(() -> format("[%s] Searching docs with [%s] in [%s, %s)", context.jobId, INCREMENTAL_ID, from, to)); - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, TransportSearchAction.TYPE) + SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client) // This ensures the search throws if there are failures and the scroll context gets cleared automatically .setAllowPartialSearchResults(false) .addSort(DestinationIndex.INCREMENTAL_ID, SortOrder.ASC) @@ -401,7 +401,7 @@ private SearchRequestBuilder buildDataSummarySearchRequestBuilder() { summaryQuery = QueryBuilders.boolQuery().filter(summaryQuery).filter(allExtractedFieldsExistQuery()); } - return new SearchRequestBuilder(client, TransportSearchAction.TYPE).setAllowPartialSearchResults(false) + return new SearchRequestBuilder(client).setAllowPartialSearchResults(false) .setIndices(context.indices) .setSize(0) .setQuery(summaryQuery) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java index 88354249786ca..4f1a99f634a0a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java @@ -139,7 +139,7 @@ public void testCancelDownloadTaskCallsOnResponseWithTheCancelResponseWhenATaskE private static void mockCancelTask(Client client) { var cluster = client.admin().cluster(); - when(cluster.prepareCancelTasks()).thenReturn(new 
CancelTasksRequestBuilder(client, CancelTasksAction.INSTANCE)); + when(cluster.prepareCancelTasks()).thenReturn(new CancelTasksRequestBuilder(client)); } private static void mockCancelTasksResponse(Client client, CancelTasksResponse response) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java index 93d20c0c7be49..037976f9dec9b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -122,8 +121,7 @@ public void setUpTests() { .subAggregation(AggregationBuilders.avg("responsetime").field("responsetime")); runtimeMappings = Collections.emptyMap(); timingStatsReporter = new DatafeedTimingStatsReporter(new DatafeedTimingStats(jobId), mock(DatafeedTimingStatsPersister.class)); - aggregatedSearchRequestBuilder = (searchSourceBuilder) -> new SearchRequestBuilder(testClient, TransportSearchAction.TYPE) - .setSource(searchSourceBuilder) + aggregatedSearchRequestBuilder = (searchSourceBuilder) -> new SearchRequestBuilder(testClient).setSource(searchSourceBuilder) .setAllowPartialSearchResults(false) .setIndices(indices.toArray(String[]::new)); } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index e9cc63e4dd96e..0fe693490d466 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -16,8 +16,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportMultiSearchAction; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -757,7 +755,7 @@ public void testDatafeedTimingStats_MultipleDocumentsAtOnce() throws IOException try { Client client = getBasicMockedClient(); - when(client.prepareMultiSearch()).thenReturn(new MultiSearchRequestBuilder(client, TransportMultiSearchAction.TYPE)); + when(client.prepareMultiSearch()).thenReturn(new MultiSearchRequestBuilder(client)); doAnswer(invocationOnMock -> { MultiSearchRequest multiSearchRequest = (MultiSearchRequest) invocationOnMock.getArguments()[0]; assertThat(multiSearchRequest.requests(), hasSize(2)); @@ -770,10 +768,10 @@ public void testDatafeedTimingStats_MultipleDocumentsAtOnce() throws IOException return null; }).when(client).multiSearch(any(), any()); when(client.prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName("foo"))).thenReturn( - new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(AnomalyDetectorsIndex.jobResultsAliasedName("foo")) + new SearchRequestBuilder(client).setIndices(AnomalyDetectorsIndex.jobResultsAliasedName("foo")) ); 
when(client.prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName("bar"))).thenReturn( - new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(AnomalyDetectorsIndex.jobResultsAliasedName("bar")) + new SearchRequestBuilder(client).setIndices(AnomalyDetectorsIndex.jobResultsAliasedName("bar")) ); JobResultsProvider provider = createProvider(client); @@ -842,9 +840,7 @@ public void testDatafeedTimingStats_Ok() throws IOException { SearchResponse response = createSearchResponse(source); Client client = getMockedClient(queryBuilder -> assertThat(queryBuilder.getName(), equalTo("ids")), response); - when(client.prepareSearch(indexName)).thenReturn( - new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(indexName) - ); + when(client.prepareSearch(indexName)).thenReturn(new SearchRequestBuilder(client).setIndices(indexName)); JobResultsProvider provider = createProvider(client); ExponentialAverageCalculationContext contextFoo = new ExponentialAverageCalculationContext( 600.0, @@ -871,9 +867,7 @@ public void testDatafeedTimingStats_NotFound() throws IOException { SearchResponse response = createSearchResponse(source); Client client = getMockedClient(queryBuilder -> assertThat(queryBuilder.getName(), equalTo("ids")), response); - when(client.prepareSearch(indexName)).thenReturn( - new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(indexName) - ); + when(client.prepareSearch(indexName)).thenReturn(new SearchRequestBuilder(client).setIndices(indexName)); JobResultsProvider provider = createProvider(client); provider.datafeedTimingStats("foo", stats -> assertThat(stats, equalTo(new DatafeedTimingStats("foo"))), e -> { throw new AssertionError("Failure getting datafeed timing stats", e); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java index 3e962aa4f0d19..719a9be43080f 100644 
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TaskRetrieverTests.java @@ -150,7 +150,7 @@ public static Client mockListTasksClient(ThreadPool threadPool) { public static Client mockListTasksClient(Client client) { var cluster = client.admin().cluster(); - when(cluster.prepareListTasks()).thenReturn(new ListTasksRequestBuilder(client, TransportListTasksAction.TYPE)); + when(cluster.prepareListTasks()).thenReturn(new ListTasksRequestBuilder(client)); return client; } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java index c9b1138a18eac..d38bdf74e2531 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryRequestBuilder; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; @@ -110,9 +109,7 @@ public void testDoCollect() throws Exception { } final RecoveryResponse recoveryResponse = new RecoveryResponse(randomInt(), randomInt(), randomInt(), recoveryStates, emptyList()); - final RecoveryRequestBuilder recoveryRequestBuilder = spy( - new RecoveryRequestBuilder(mock(ElasticsearchClient.class), RecoveryAction.INSTANCE) - ); + final RecoveryRequestBuilder recoveryRequestBuilder = 
spy(new RecoveryRequestBuilder(mock(ElasticsearchClient.class))); doReturn(recoveryResponse).when(recoveryRequestBuilder).get(); final IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); @@ -209,9 +206,7 @@ public void testDoCollectThrowsTimeoutException() throws Exception { ) ); - final RecoveryRequestBuilder recoveryRequestBuilder = spy( - new RecoveryRequestBuilder(mock(ElasticsearchClient.class), RecoveryAction.INSTANCE) - ); + final RecoveryRequestBuilder recoveryRequestBuilder = spy(new RecoveryRequestBuilder(mock(ElasticsearchClient.class))); doReturn(recoveryResponse).when(recoveryRequestBuilder).get(); final IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java index 3ea67afbb0f5a..a17d90d250609 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.indices.stats.IndexStats; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; @@ -117,9 +116,7 @@ public void testDoCollect() throws Exception { final String[] indexNames = indicesMetadata.keySet().toArray(new String[0]); when(metadata.getConcreteAllIndices()).thenReturn(indexNames); - final 
IndicesStatsRequestBuilder indicesStatsRequestBuilder = spy( - new IndicesStatsRequestBuilder(mock(ElasticsearchClient.class), IndicesStatsAction.INSTANCE) - ); + final IndicesStatsRequestBuilder indicesStatsRequestBuilder = spy(new IndicesStatsRequestBuilder(mock(ElasticsearchClient.class))); doReturn(indicesStatsResponse).when(indicesStatsRequestBuilder).get(); final IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); @@ -196,9 +193,7 @@ public void testDoCollectThrowsTimeoutException() throws Exception { ) } ); - final IndicesStatsRequestBuilder indicesStatsRequestBuilder = spy( - new IndicesStatsRequestBuilder(mock(ElasticsearchClient.class), IndicesStatsAction.INSTANCE) - ); + final IndicesStatsRequestBuilder indicesStatsRequestBuilder = spy(new IndicesStatsRequestBuilder(mock(ElasticsearchClient.class))); doReturn(indicesStatsResponse).when(indicesStatsRequestBuilder).get(); final IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 3dfe440d56831..673956199677b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import 
org.elasticsearch.action.get.GetAction; @@ -75,7 +74,6 @@ import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.core.security.action.role.RoleDescriptorRequestValidator; -import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequestBuilder; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; @@ -1654,8 +1652,7 @@ public void testDerivedKeys() throws ExecutionException, InterruptedException { clientKey1 = client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)); } else { final CreateTokenResponse createTokenResponse = new CreateTokenRequestBuilder( - client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)), - CreateTokenAction.INSTANCE + client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)) ).setGrantType("client_credentials").get(); clientKey1 = client().filterWithHeader(Map.of("Authorization", "Bearer " + createTokenResponse.getTokenString())); } @@ -2926,7 +2923,7 @@ private String getBase64EncodedApiKeyValue(String id, SecureString key) { } private void assertApiKeyNotCreated(Client client, String keyName) throws ExecutionException, InterruptedException { - new RefreshRequestBuilder(client, RefreshAction.INSTANCE).setIndices(SECURITY_MAIN_ALIAS).execute().get(); + new RefreshRequestBuilder(client).setIndices(SECURITY_MAIN_ALIAS).execute().get(); assertEquals( 0, client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.builder().apiKeyName(keyName).ownedByAuthenticatedUser(false).build()) diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index 9323299a4d9c5..9b1350a43f9a2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -60,7 +60,6 @@ import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenAction; import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenRequest; import org.elasticsearch.xpack.core.security.action.service.CreateServiceAccountTokenResponse; -import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequestBuilder; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; @@ -276,8 +275,7 @@ public void testGetApiKeyWorksForTheApiKeyItself() { clientKey1 = client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)); } else { final CreateTokenResponse createTokenResponse = new CreateTokenRequestBuilder( - client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)), - CreateTokenAction.INSTANCE + client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)) ).setGrantType("client_credentials").get(); clientKey1 = client().filterWithHeader(Map.of("Authorization", "Bearer " + createTokenResponse.getTokenString())); } @@ -333,8 +331,7 @@ public void testGrantApiKeyForUserWithRunAs() throws IOException { assertThat( expectThrows( 
ElasticsearchSecurityException.class, - () -> new CreateTokenRequestBuilder(clientWithGrantedKey, CreateTokenAction.INSTANCE).setGrantType("client_credentials") - .get() + () -> new CreateTokenRequestBuilder(clientWithGrantedKey).setGrantType("client_credentials").get() ).getMessage(), containsString("unauthorized") ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java index c0605d9e9380e..d11ca70744b7b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreCacheTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; @@ -304,14 +303,14 @@ public void testRolesCacheIsClearedWhenPrivilegesIsChanged() { "Basic " + Base64.getEncoder().encodeToString((testRoleCacheUser + ":longerpassword").getBytes(StandardCharsets.UTF_8)) ) ); - new ClusterHealthRequestBuilder(testRoleCacheUserClient, ClusterHealthAction.INSTANCE).get(); + new ClusterHealthRequestBuilder(testRoleCacheUserClient).get(); // Directly deleted the role document final DeleteResponse deleteResponse = client.prepareDelete(SECURITY_MAIN_ALIAS, "role-" + testRole).get(); assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); // The cluster health action can still success 
since the role is cached - new ClusterHealthRequestBuilder(testRoleCacheUserClient, ClusterHealthAction.INSTANCE).get(); + new ClusterHealthRequestBuilder(testRoleCacheUserClient).get(); // Change an application privilege which triggers role cache invalidation as well if (randomBoolean()) { @@ -320,11 +319,7 @@ public void testRolesCacheIsClearedWhenPrivilegesIsChanged() { addApplicationPrivilege("app-3", "read", "r:q:r:s"); } // Since role cache is cleared, the cluster health action is no longer authorized - expectThrows( - ElasticsearchSecurityException.class, - () -> new ClusterHealthRequestBuilder(testRoleCacheUserClient, ClusterHealthAction.INSTANCE).get() - ); - + expectThrows(ElasticsearchSecurityException.class, () -> new ClusterHealthRequestBuilder(testRoleCacheUserClient).get()); } private HasPrivilegesResponse checkPrivilege(String applicationName, String privilegeName) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreSingleNodeTests.java index a3bcc19bca160..2503f2fc17d20 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreSingleNodeTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; @@ -25,12 +24,10 @@ import 
org.elasticsearch.xpack.core.security.action.privilege.GetPrivilegesResponse; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequestBuilder; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateResponse; -import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequestBuilder; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; @@ -80,7 +77,7 @@ public void configureApplicationPrivileges() { public void testResolvePrivilegesWorkWhenExpensiveQueriesAreDisabled() throws IOException { // Disable expensive query - new ClusterUpdateSettingsRequestBuilder(client(), ClusterUpdateSettingsAction.INSTANCE).setTransientSettings( + new ClusterUpdateSettingsRequestBuilder(client()).setTransientSettings( Settings.builder().put(ALLOW_EXPENSIVE_QUERIES.getKey(), false) ).get(); @@ -106,7 +103,7 @@ public void testResolvePrivilegesWorkWhenExpensiveQueriesAreDisabled() throws IO ); // User role resolution works with wildcard application name - new PutRoleRequestBuilder(client(), PutRoleAction.INSTANCE).source("app_user_role", new BytesArray(""" + new PutRoleRequestBuilder(client()).source("app_user_role", new BytesArray(""" { "cluster": ["manage_own_api_key"], "applications": [ @@ -124,7 +121,7 @@ public void testResolvePrivilegesWorkWhenExpensiveQueriesAreDisabled() throws IO } """), XContentType.JSON).get(); - new PutUserRequestBuilder(client(), 
PutUserAction.INSTANCE).username("app_user") + new PutUserRequestBuilder(client()).username("app_user") .password(TEST_PASSWORD_SECURE_STRING, getFastStoredHashAlgoForTests()) .roles("app_user_role") .get(); @@ -187,7 +184,7 @@ public void testResolvePrivilegesWorkWhenExpensiveQueriesAreDisabled() throws IO assertThat(authenticateResponse.authentication().getEffectiveSubject().getUser().principal(), equalTo("app_user")); } finally { // Reset setting since test suite expects things in a clean slate - new ClusterUpdateSettingsRequestBuilder(client(), ClusterUpdateSettingsAction.INSTANCE).setTransientSettings( + new ClusterUpdateSettingsRequestBuilder(client()).setTransientSettings( Settings.builder().putNull(ALLOW_EXPENSIVE_QUERIES.getKey()) ).get(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java index 2a6fad9c81f53..ac52ef6864f9f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutActionTests.java @@ -9,12 +9,10 @@ import com.nimbusds.jwt.JWT; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; @@ -22,7 +20,6 @@ import org.elasticsearch.action.index.IndexResponse; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.internal.Client; @@ -118,22 +115,22 @@ public void setup() throws Exception { when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); doAnswer(invocationOnMock -> { - GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE); + GetRequestBuilder builder = new GetRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareGet(nullable(String.class), nullable(String.class)); doAnswer(invocationOnMock -> { - IndexRequestBuilder builder = new IndexRequestBuilder(client, IndexAction.INSTANCE); + IndexRequestBuilder builder = new IndexRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]); return builder; }).when(client).prepareIndex(nullable(String.class)); doAnswer(invocationOnMock -> { - UpdateRequestBuilder builder = new UpdateRequestBuilder(client, UpdateAction.INSTANCE); + UpdateRequestBuilder builder = new UpdateRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareUpdate(nullable(String.class), anyString()); doAnswer(invocationOnMock -> { - BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE); + BulkRequestBuilder builder = new BulkRequestBuilder(client); return builder; }).when(client).prepareBulk(); doAnswer(invocationOnMock -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index e3631a785b9f3..fb3296d6dec7b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -7,15 +7,12 @@ package org.elasticsearch.xpack.security.action.saml; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; @@ -26,7 +23,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.internal.Client; @@ -135,25 +131,25 @@ public void setup() throws Exception { when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); doAnswer(invocationOnMock -> { - GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE); + GetRequestBuilder builder = new GetRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) 
invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareGet(nullable(String.class), nullable(String.class)); doAnswer(invocationOnMock -> { - IndexRequestBuilder builder = new IndexRequestBuilder(client, IndexAction.INSTANCE); + IndexRequestBuilder builder = new IndexRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]); return builder; }).when(client).prepareIndex(nullable(String.class)); doAnswer(invocationOnMock -> { - UpdateRequestBuilder builder = new UpdateRequestBuilder(client, UpdateAction.INSTANCE); + UpdateRequestBuilder builder = new UpdateRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareUpdate(nullable(String.class), nullable(String.class)); doAnswer(invocationOnMock -> { - BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE); + BulkRequestBuilder builder = new BulkRequestBuilder(client); return builder; }).when(client).prepareBulk(); - when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); + when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client)); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; MultiGetResponse response = mock(MultiGetResponse.class); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenActionTests.java index fe48408b39c54..08fd03d1ac115 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenActionTests.java @@ -10,10 +10,8 @@ import 
org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; @@ -24,7 +22,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; @@ -107,11 +104,11 @@ public void setupClient() { when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(SETTINGS); doAnswer(invocationOnMock -> { - GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE); + GetRequestBuilder builder = new GetRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareGet(anyString(), anyString()); - when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); + when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client)); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -129,10 +126,8 @@ public void setupClient() { listener.onResponse(response); return Void.TYPE; }).when(client).multiGet(any(MultiGetRequest.class), anyActionListener()); - when(client.prepareIndex(nullable(String.class))).thenReturn(new 
IndexRequestBuilder(client, IndexAction.INSTANCE)); - when(client.prepareUpdate(any(String.class), any(String.class))).thenReturn( - new UpdateRequestBuilder(client, UpdateAction.INSTANCE) - ); + when(client.prepareIndex(nullable(String.class))).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareUpdate(any(String.class), any(String.class))).thenReturn(new UpdateRequestBuilder(client)); doAnswer(invocationOnMock -> { idxReqReference.set((IndexRequest) invocationOnMock.getArguments()[1]); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index fe433560652da..2031cd4f7685b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -24,16 +24,13 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; @@ -245,8 +242,8 @@ public void testCreateApiKeyUsesBulkIndexAction() throws Exception { 
.realmRef(new RealmRef("file", "file", "node-1")) .build(false); final CreateApiKeyRequest createApiKeyRequest = new CreateApiKeyRequest("key-1", null, null); - when(client.prepareIndex(anyString())).thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE)); - when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE)); + when(client.prepareIndex(anyString())).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); when(client.threadPool()).thenReturn(threadPool); final AtomicBoolean bulkActionInvoked = new AtomicBoolean(false); doAnswer(inv -> { @@ -271,7 +268,7 @@ public void testGetApiKeys() throws Exception { when(clock.instant()).thenReturn(Instant.ofEpochMilli(now)); final Settings settings = Settings.builder().put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true).build(); when(client.threadPool()).thenReturn(threadPool); - SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client)); when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(searchRequestBuilder); final ApiKeyService service = createApiKeyService(settings); final AtomicReference searchRequest = new AtomicReference<>(); @@ -332,7 +329,7 @@ public void testGetApiKeys() throws Exception { public void testInvalidateApiKeys() throws Exception { final Settings settings = Settings.builder().put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true).build(); when(client.threadPool()).thenReturn(threadPool); - SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client)); when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(searchRequestBuilder); final ApiKeyService service = 
createApiKeyService(settings); final AtomicReference searchRequest = new AtomicReference<>(); @@ -406,7 +403,7 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { // Mock the search request for keys to invalidate when(client.threadPool()).thenReturn(threadPool); - when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(new SearchRequestBuilder(client)); doAnswer(invocation -> { final var listener = (ActionListener) invocation.getArguments()[1]; final var searchHit = new SearchHit(docId, apiKeyId); @@ -445,8 +442,8 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { }).when(client).search(any(SearchRequest.class), anyActionListener()); // Capture the Update request so that we can verify it is configured as expected - when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE)); - final var updateRequestBuilder = Mockito.spy(new UpdateRequestBuilder(client, UpdateAction.INSTANCE)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); + final var updateRequestBuilder = Mockito.spy(new UpdateRequestBuilder(client)); when(client.prepareUpdate(eq(SECURITY_MAIN_ALIAS), eq(apiKeyId))).thenReturn(updateRequestBuilder); // Stub bulk and cache clearing calls so that the entire action flow can complete (not strictly necessary but nice to have) @@ -502,8 +499,8 @@ public void testCreateApiKeyWillCacheOnCreation() { .realmRef(new RealmRef(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8))) .build(false); final CreateApiKeyRequest createApiKeyRequest = new CreateApiKeyRequest(randomAlphaOfLengthBetween(3, 8), null, null); - when(client.prepareIndex(anyString())).thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE)); - when(client.prepareBulk()).thenReturn(new 
BulkRequestBuilder(client, BulkAction.INSTANCE)); + when(client.prepareIndex(anyString())).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); when(client.threadPool()).thenReturn(threadPool); doAnswer(inv -> { final Object[] args = inv.getArguments(); @@ -739,7 +736,7 @@ public void testCrossClusterApiKeyUsageStats() { final Instant now = Instant.now(); when(clock.instant()).thenReturn(now); when(client.threadPool()).thenReturn(threadPool); - SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + SearchRequestBuilder searchRequestBuilder = Mockito.spy(new SearchRequestBuilder(client)); when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(searchRequestBuilder); final List searchHits = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 34e88be58de3b..e899102b2d363 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -14,18 +14,15 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -287,10 +284,8 @@ public void init() throws Exception { threadContext = threadPool.getThreadContext(); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); - when(client.prepareIndex(nullable(String.class))).thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE)); - when(client.prepareUpdate(nullable(String.class), nullable(String.class))).thenReturn( - new UpdateRequestBuilder(client, UpdateAction.INSTANCE) - ); + when(client.prepareIndex(nullable(String.class))).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareUpdate(nullable(String.class), nullable(String.class))).thenReturn(new UpdateRequestBuilder(client)); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener responseActionListener = (ActionListener) invocationOnMock.getArguments()[2]; @@ -307,7 +302,7 @@ public void init() throws Exception { return null; }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), anyActionListener()); doAnswer(invocationOnMock -> { - GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE); + GetRequestBuilder builder = new GetRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareGet(nullable(String.class), nullable(String.class)); @@ -1910,7 +1905,7 @@ public void testAuthenticateWithToken() throws Exception { ); } String token = tokenFuture.get().getAccessToken(); - when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, 
MultiGetAction.INSTANCE)); + when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client)); mockGetTokenFromAccessTokenBytes(tokenService, newTokenBytes.v1(), expected, Map.of(), false, null, client); when(securityIndex.defensiveCopy()).thenReturn(securityIndex); when(securityIndex.isAvailable(SecurityIndexManager.Availability.PRIMARY_SHARDS)).thenReturn(true); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 0188907462fc7..5ef9749a5cd08 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -16,12 +16,10 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -32,9 +30,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import 
org.elasticsearch.client.internal.Client; @@ -163,18 +159,18 @@ public void setupClient() { when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); doAnswer(invocationOnMock -> { - GetRequestBuilder builder = new GetRequestBuilder(client, GetAction.INSTANCE); + GetRequestBuilder builder = new GetRequestBuilder(client); builder.setIndex((String) invocationOnMock.getArguments()[0]).setId((String) invocationOnMock.getArguments()[1]); return builder; }).when(client).prepareGet(anyString(), anyString()); - when(client.prepareIndex(any(String.class))).thenReturn(new IndexRequestBuilder(client, IndexAction.INSTANCE)); - when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE)); + when(client.prepareIndex(any(String.class))).thenReturn(new IndexRequestBuilder(client)); + when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client)); when(client.prepareUpdate(any(String.class), any(String.class))).thenAnswer(inv -> { final String index = (String) inv.getArguments()[0]; final String id = (String) inv.getArguments()[1]; - return new UpdateRequestBuilder(client, UpdateAction.INSTANCE).setIndex(index).setId(id); + return new UpdateRequestBuilder(client).setIndex(index).setId(id); }); - when(client.prepareSearch(any(String.class))).thenReturn(new SearchRequestBuilder(client, TransportSearchAction.TYPE)); + when(client.prepareSearch(any(String.class))).thenReturn(new SearchRequestBuilder(client)); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener responseActionListener = (ActionListener) invocationOnMock.getArguments()[2]; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index d083c1700c302..bcb335c7cf9bc 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; @@ -194,9 +193,7 @@ public void testResolveRolesDoesNotUseLastLoadCacheWhenSecurityIndexAvailable() final ThreadPool mockThreadPool = mock(ThreadPool.class); when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.threadPool()).thenReturn(mockThreadPool); - when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn( - Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)) - ); + when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(Mockito.spy(new SearchRequestBuilder(client))); final ExpressionRoleMapping mapping = new ExpressionRoleMapping( "mapping", new FieldExpression("dn", Collections.singletonList(new FieldValue("*"))), @@ -238,9 +235,7 @@ public void testResolveRolesUsesLastLoadCacheWhenSecurityIndexUnavailable() thro final ThreadPool mockThreadPool = mock(ThreadPool.class); when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.threadPool()).thenReturn(mockThreadPool); - when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn( - Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)) - ); + when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(Mockito.spy(new SearchRequestBuilder(client))); final ExpressionRoleMapping mapping = new ExpressionRoleMapping( 
"mapping", new FieldExpression("dn", Collections.singletonList(new FieldValue("*"))), @@ -302,9 +297,7 @@ public void testResolveRolesDoesNotUseLastLoadCacheWhenSecurityIndexDoesNotExist final ThreadPool mockThreadPool = mock(ThreadPool.class); when(mockThreadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); when(client.threadPool()).thenReturn(mockThreadPool); - when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn( - Mockito.spy(new SearchRequestBuilder(client, TransportSearchAction.TYPE)) - ); + when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(Mockito.spy(new SearchRequestBuilder(client))); final ExpressionRoleMapping mapping = new ExpressionRoleMapping( "mapping", new FieldExpression("dn", Collections.singletonList(new FieldValue("*"))), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index 17f1268b7f5e8..496b2245ad80b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.get.MultiGetItemResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; -import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchRequestBuilder; @@ -196,9 +195,7 @@ public void prepare() { ); this.client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); - when(client.prepareSearch(SECURITY_PROFILE_ALIAS)).thenReturn( - new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(SECURITY_PROFILE_ALIAS) - ); + 
when(client.prepareSearch(SECURITY_PROFILE_ALIAS)).thenReturn(new SearchRequestBuilder(client).setIndices(SECURITY_PROFILE_ALIAS)); this.profileIndex = SecurityMocks.mockSecurityIndexManager(SECURITY_PROFILE_ALIAS); final ClusterService clusterService = mock(ClusterService.class); final ClusterState clusterState = mock(ClusterState.class); @@ -598,9 +595,7 @@ public void testSecurityProfileOrigin() { return null; }).when(client).execute(eq(TransportMultiSearchAction.TYPE), any(MultiSearchRequest.class), anyActionListener()); - when(client.prepareIndex(SECURITY_PROFILE_ALIAS)).thenReturn( - new IndexRequestBuilder(client, IndexAction.INSTANCE, SECURITY_PROFILE_ALIAS) - ); + when(client.prepareIndex(SECURITY_PROFILE_ALIAS)).thenReturn(new IndexRequestBuilder(client, SECURITY_PROFILE_ALIAS)); final RuntimeException expectedException = new RuntimeException("expected"); doAnswer(invocation -> { @@ -895,14 +890,8 @@ public void testShouldSkipUpdateForActivate() { public void testActivateWhenShouldSkipUpdateForActivateReturnsTrue() throws IOException { final ProfileService service = spy(profileService); - doAnswer( - invocation -> new UpdateRequestBuilder( - client, - UpdateAction.INSTANCE, - SECURITY_PROFILE_ALIAS, - (String) invocation.getArguments()[1] - ) - ).when(client).prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); + doAnswer(invocation -> new UpdateRequestBuilder(client, SECURITY_PROFILE_ALIAS, (String) invocation.getArguments()[1])).when(client) + .prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); final UpdateResponse updateResponse = mock(UpdateResponse.class); when(updateResponse.getPrimaryTerm()).thenReturn(randomNonNegativeLong()); @@ -931,14 +920,8 @@ public void testActivateWhenShouldSkipUpdateForActivateReturnsTrue() throws IOEx public void testActivateWhenShouldSkipUpdateForActivateReturnsFalseFirst() throws IOException { final ProfileService service = spy(profileService); - doAnswer( - invocation -> new UpdateRequestBuilder( - client, - 
UpdateAction.INSTANCE, - SECURITY_PROFILE_ALIAS, - (String) invocation.getArguments()[1] - ) - ).when(client).prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); + doAnswer(invocation -> new UpdateRequestBuilder(client, SECURITY_PROFILE_ALIAS, (String) invocation.getArguments()[1])).when(client) + .prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); // Throw version conflict on update to force GET document final Exception updateException; @@ -993,14 +976,8 @@ public void testActivateWhenShouldSkipUpdateForActivateReturnsFalseFirst() throw public void testActivateWhenGetRequestErrors() throws IOException { final ProfileService service = spy(profileService); - doAnswer( - invocation -> new UpdateRequestBuilder( - client, - UpdateAction.INSTANCE, - SECURITY_PROFILE_ALIAS, - (String) invocation.getArguments()[1] - ) - ).when(client).prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); + doAnswer(invocation -> new UpdateRequestBuilder(client, SECURITY_PROFILE_ALIAS, (String) invocation.getArguments()[1])).when(client) + .prepareUpdate(eq(SECURITY_PROFILE_ALIAS), anyString()); // Throw version conflict on update to force GET document final var versionConflictEngineException = new VersionConflictEngineException(mock(ShardId.class), "", ""); @@ -1069,7 +1046,7 @@ public void testUsageStats() { return null; }).when(client).execute(eq(TransportMultiSearchAction.TYPE), any(MultiSearchRequest.class), anyActionListener()); - when(client.prepareMultiSearch()).thenReturn(new MultiSearchRequestBuilder(client, TransportMultiSearchAction.TYPE)); + when(client.prepareMultiSearch()).thenReturn(new MultiSearchRequestBuilder(client)); final PlainActionFuture> future = new PlainActionFuture<>(); profileService.usageStats(future); assertThat(future.actionGet(), equalTo(metrics)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java index a15d8409fe2b4..2f8666501f523 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityMocks.java @@ -10,7 +10,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -103,7 +102,7 @@ public static void mockGetRequest(Client client, String indexAliasName, String d } public static void mockGetRequest(Client client, String indexAliasName, String documentId, GetResult result) { - final GetRequestBuilder requestBuilder = new GetRequestBuilder(client, GetAction.INSTANCE); + final GetRequestBuilder requestBuilder = new GetRequestBuilder(client); requestBuilder.setIndex(indexAliasName); requestBuilder.setId(documentId); when(client.prepareGet(indexAliasName, documentId)).thenReturn(requestBuilder); @@ -125,7 +124,7 @@ public static void mockGetRequest(Client client, String indexAliasName, String d } public static void mockGetRequestException(Client client, Exception e) { - when(client.prepareGet(anyString(), anyString())).thenReturn(new GetRequestBuilder(client, GetAction.INSTANCE)); + when(client.prepareGet(anyString(), anyString())).thenReturn(new GetRequestBuilder(client)); doAnswer(inv -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) inv.getArguments()[1]; @@ -207,7 +206,7 @@ public static void mockIndexRequest(Client client, String indexAliasName, Consum Assert.assertThat(inv.getArguments(), arrayWithSize(1)); final Object requestIndex = inv.getArguments()[0]; Assert.assertThat(requestIndex, 
instanceOf(String.class)); - return new IndexRequestBuilder(client, IndexAction.INSTANCE).setIndex((String) requestIndex); + return new IndexRequestBuilder(client).setIndex((String) requestIndex); }).when(client).prepareIndex(anyString()); doAnswer(inv -> { Assert.assertThat(inv.getArguments(), arrayWithSize(3)); diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java index 642a7b5982793..8f929e0fbb5d6 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlClearCursorRequestBuilder.java @@ -11,8 +11,8 @@ public class SqlClearCursorRequestBuilder extends ActionRequestBuilder { - public SqlClearCursorRequestBuilder(ElasticsearchClient client, SqlClearCursorAction action) { - super(client, action, new SqlClearCursorRequest()); + public SqlClearCursorRequestBuilder(ElasticsearchClient client) { + super(client, SqlClearCursorAction.INSTANCE, new SqlClearCursorRequest()); } public SqlClearCursorRequestBuilder cursor(String cursor) { diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java index aabf2035a74f0..ee4ac2872b336 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java @@ -26,10 +26,9 @@ */ public class SqlQueryRequestBuilder extends ActionRequestBuilder { - public SqlQueryRequestBuilder(ElasticsearchClient client, SqlQueryAction action) { + public SqlQueryRequestBuilder(ElasticsearchClient 
client) { this( client, - action, "", emptyList(), null, @@ -53,7 +52,6 @@ public SqlQueryRequestBuilder(ElasticsearchClient client, SqlQueryAction action) public SqlQueryRequestBuilder( ElasticsearchClient client, - SqlQueryAction action, String query, List params, QueryBuilder filter, @@ -75,7 +73,7 @@ public SqlQueryRequestBuilder( ) { super( client, - action, + SqlQueryAction.INSTANCE, new SqlQueryRequest( query, params, diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java index 76c15d2a19c3a..01bfe118bde6e 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java @@ -25,10 +25,9 @@ * Builder for the request for the sql action for translating SQL queries into ES requests */ public class SqlTranslateRequestBuilder extends ActionRequestBuilder { - public SqlTranslateRequestBuilder(ElasticsearchClient client, SqlTranslateAction action) { + public SqlTranslateRequestBuilder(ElasticsearchClient client) { this( client, - action, null, null, emptyMap(), @@ -43,7 +42,6 @@ public SqlTranslateRequestBuilder(ElasticsearchClient client, SqlTranslateAction public SqlTranslateRequestBuilder( ElasticsearchClient client, - SqlTranslateAction action, String query, QueryBuilder filter, Map runtimeMappings, @@ -56,7 +54,7 @@ public SqlTranslateRequestBuilder( ) { super( client, - action, + SqlTranslateAction.INSTANCE, new SqlTranslateRequest(query, params, filter, runtimeMappings, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo) ); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java 
b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java index 4863f973f163f..a3acd545730c9 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java @@ -109,7 +109,7 @@ public void testBasicAsyncExecution() throws Exception { boolean success = randomBoolean(); String query = "SELECT event_type FROM test WHERE " + (success ? "i=1" : "10/i=1"); - SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query(query) + SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client()).query(query) .waitForCompletionTimeout(TimeValue.timeValueMillis(1)); List plugins = initBlockFactory(true, false); @@ -159,7 +159,7 @@ public void testGoingAsync() throws Exception { boolean success = randomBoolean(); String query = "SELECT event_type FROM test WHERE " + (success ? "i=1" : "10/i=1"); - SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query(query) + SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client()).query(query) .waitForCompletionTimeout(TimeValue.timeValueMillis(1)); boolean customKeepAlive = randomBoolean(); @@ -215,7 +215,7 @@ public void testAsyncCancellation() throws Exception { boolean success = randomBoolean(); String query = "SELECT event_type FROM test WHERE " + (success ? 
"i=1" : "10/i=1"); - SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query(query) + SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client()).query(query) .waitForCompletionTimeout(TimeValue.timeValueMillis(1)); boolean customKeepAlive = randomBoolean(); @@ -257,7 +257,7 @@ public void testFinishingBeforeTimeout() throws Exception { boolean success = randomBoolean(); boolean keepOnCompletion = randomBoolean(); String query = "SELECT event_type FROM test WHERE " + (success ? "i=1" : "10/i=1"); - SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query(query) + SqlQueryRequestBuilder builder = new SqlQueryRequestBuilder(client()).query(query) .waitForCompletionTimeout(TimeValue.timeValueSeconds(10)); if (keepOnCompletion || randomBoolean()) { builder.keepOnCompletion(keepOnCompletion); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java index 4212d1e641934..5892c5ed967b4 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlActionIT.java @@ -30,9 +30,10 @@ public void testSqlAction() { boolean dataBeforeCount = randomBoolean(); String columns = dataBeforeCount ? 
"data, count" : "count, data"; - SqlQueryResponse response = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query( - "SELECT " + columns + " FROM test ORDER BY count" - ).mode(Mode.JDBC).version(Version.CURRENT.toString()).get(); + SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT " + columns + " FROM test ORDER BY count") + .mode(Mode.JDBC) + .version(Version.CURRENT.toString()) + .get(); assertThat(response.size(), equalTo(2L)); assertThat(response.columns(), hasSize(2)); int dataIndex = dataBeforeCount ? 0 : 1; @@ -48,7 +49,7 @@ public void testSqlAction() { } public void testSqlActionCurrentVersion() { - SqlQueryResponse response = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT true") + SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT true") .mode(randomFrom(Mode.CLI, Mode.JDBC)) .version(Version.CURRENT.toString()) .get(); @@ -57,7 +58,7 @@ public void testSqlActionCurrentVersion() { } public void testSqlActionOutdatedVersion() { - SqlQueryRequestBuilder request = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT true") + SqlQueryRequestBuilder request = new SqlQueryRequestBuilder(client()).query("SELECT true") .mode(randomFrom(Mode.CLI, Mode.JDBC)) .version("1.2.3"); assertRequestBuilderThrows(request, org.elasticsearch.action.ActionRequestValidationException.class); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java index f23d9de1c79e4..13976514cd347 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java @@ -63,9 +63,7 @@ public void testCancellation() throws Exception { indexRandom(true, 
builders); boolean cancelDuringSearch = randomBoolean(); List plugins = initBlockFactory(cancelDuringSearch, cancelDuringSearch == false); - SqlQueryRequest request = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query( - "SELECT event_type FROM test WHERE val=1" - ).request(); + SqlQueryRequest request = new SqlQueryRequestBuilder(client()).query("SELECT event_type FROM test WHERE val=1").request(); String id = randomAlphaOfLength(10); logger.trace("Preparing search"); // We might perform field caps on the same thread if it is local client, so we cannot use the standard mechanism diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java index c575b73bdd878..00b6ad98ab926 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlClearCursorActionIT.java @@ -34,18 +34,14 @@ public void testSqlClearCursorAction() { int fetchSize = randomIntBetween(5, 20); logger.info("Fetching {} records at a time", fetchSize); - SqlQueryResponse sqlQueryResponse = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test") - .fetchSize(fetchSize) - .get(); + SqlQueryResponse sqlQueryResponse = new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").fetchSize(fetchSize).get(); assertEquals(fetchSize, sqlQueryResponse.size()); assertThat(getNumberOfSearchContexts(), greaterThan(0L)); assertThat(sqlQueryResponse.cursor(), notNullValue()); assertThat(sqlQueryResponse.cursor(), not(equalTo(Cursor.EMPTY))); - SqlClearCursorResponse cleanCursorResponse = new SqlClearCursorRequestBuilder(client(), SqlClearCursorAction.INSTANCE).cursor( - sqlQueryResponse.cursor() - ).get(); + SqlClearCursorResponse 
cleanCursorResponse = new SqlClearCursorRequestBuilder(client()).cursor(sqlQueryResponse.cursor()).get(); assertTrue(cleanCursorResponse.isSucceeded()); assertEquals(0, getNumberOfSearchContexts()); @@ -66,9 +62,7 @@ public void testAutoCursorCleanup() { int fetchSize = randomIntBetween(5, 20); logger.info("Fetching {} records at a time", fetchSize); - SqlQueryResponse sqlQueryResponse = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test") - .fetchSize(fetchSize) - .get(); + SqlQueryResponse sqlQueryResponse = new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").fetchSize(fetchSize).get(); assertEquals(fetchSize, sqlQueryResponse.size()); assertThat(getNumberOfSearchContexts(), greaterThan(0L)); @@ -77,14 +71,12 @@ public void testAutoCursorCleanup() { long fetched = sqlQueryResponse.size(); do { - sqlQueryResponse = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).cursor(sqlQueryResponse.cursor()).get(); + sqlQueryResponse = new SqlQueryRequestBuilder(client()).cursor(sqlQueryResponse.cursor()).get(); fetched += sqlQueryResponse.size(); } while (sqlQueryResponse.cursor().isEmpty() == false); assertEquals(indexSize, fetched); - SqlClearCursorResponse cleanCursorResponse = new SqlClearCursorRequestBuilder(client(), SqlClearCursorAction.INSTANCE).cursor( - sqlQueryResponse.cursor() - ).get(); + SqlClearCursorResponse cleanCursorResponse = new SqlClearCursorRequestBuilder(client()).cursor(sqlQueryResponse.cursor()).get(); assertFalse(cleanCursorResponse.isSucceeded()); assertEquals(0, getNumberOfSearchContexts()); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java index 0374818d7e3b5..c16b0554d8738 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java +++ 
b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlLicenseIT.java @@ -111,12 +111,12 @@ public void testSqlQueryActionLicense() throws Exception { ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test").get() + () -> new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").get() ); assertThat(e.getMessage(), equalTo("current license is non-compliant for [sql]")); enableSqlLicensing(); - SqlQueryResponse response = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test").get(); + SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").get(); assertThat(response.size(), Matchers.equalTo(2L)); } @@ -126,14 +126,12 @@ public void testSqlQueryActionJdbcModeLicense() throws Exception { ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test").mode("jdbc").get() + () -> new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").mode("jdbc").get() ); assertThat(e.getMessage(), equalTo("current license is non-compliant for [jdbc]")); enableJdbcLicensing(); - SqlQueryResponse response = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query("SELECT * FROM test") - .mode("jdbc") - .get(); + SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query("SELECT * FROM test").mode("jdbc").get(); assertThat(response.size(), Matchers.equalTo(2L)); } @@ -143,13 +141,12 @@ public void testSqlTranslateActionLicense() throws Exception { ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> new SqlTranslateRequestBuilder(client(), SqlTranslateAction.INSTANCE).query("SELECT * FROM test").get() + () -> new SqlTranslateRequestBuilder(client()).query("SELECT 
* FROM test").get() ); assertThat(e.getMessage(), equalTo("current license is non-compliant for [sql]")); enableSqlLicensing(); - SqlTranslateResponse response = new SqlTranslateRequestBuilder(client(), SqlTranslateAction.INSTANCE).query("SELECT * FROM test") - .get(); + SqlTranslateResponse response = new SqlTranslateRequestBuilder(client()).query("SELECT * FROM test").get(); SearchSourceBuilder source = response.source(); assertThat(source.docValueFields(), Matchers.contains(new FieldAndFormat("count", null))); FetchSourceContext fetchSource = source.fetchSource(); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlSearchPageTimeoutIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlSearchPageTimeoutIT.java index 5eec6b8b802d1..37b4ca3268f15 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlSearchPageTimeoutIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlSearchPageTimeoutIT.java @@ -42,7 +42,7 @@ public void testSearchContextIsCleanedUpAfterPageTimeoutForAggregationQueries() public void testSearchContextIsCleanedUpAfterPageTimeout(String query) throws Exception { setupTestIndex(); - SqlQueryResponse response = new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).query(query) + SqlQueryResponse response = new SqlQueryRequestBuilder(client()).query(query) .fetchSize(1) .pageTimeout(TimeValue.timeValueMillis(500)) .get(); @@ -54,7 +54,7 @@ public void testSearchContextIsCleanedUpAfterPageTimeout(String query) throws Ex SearchPhaseExecutionException exception = expectThrows( SearchPhaseExecutionException.class, - () -> new SqlQueryRequestBuilder(client(), SqlQueryAction.INSTANCE).cursor(response.cursor()).get() + () -> new SqlQueryRequestBuilder(client()).cursor(response.cursor()).get() ); assertThat(Arrays.asList(exception.guessRootCauses()), 
contains(instanceOf(SearchContextMissingException.class))); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java index 20f38342a1a39..49f0af83a8e83 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlTranslateActionIT.java @@ -31,9 +31,8 @@ public void testSqlTranslateAction() { boolean columnOrder = randomBoolean(); String columns = columnOrder ? "data, count, date" : "date, data, count"; - SqlTranslateResponse response = new SqlTranslateRequestBuilder(client(), SqlTranslateAction.INSTANCE).query( - "SELECT " + columns + " FROM test ORDER BY count" - ).get(); + SqlTranslateResponse response = new SqlTranslateRequestBuilder(client()).query("SELECT " + columns + " FROM test ORDER BY count") + .get(); SearchSourceBuilder source = response.source(); List actualFields = source.fetchFields(); List expectedFields = new ArrayList<>(3); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java index 1fbe85d873957..86cb54a33bb5a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.type.DefaultDataTypeRegistry; import org.elasticsearch.xpack.sql.SqlTestUtils; -import org.elasticsearch.xpack.sql.action.SqlQueryAction; import org.elasticsearch.xpack.sql.action.SqlQueryRequest; import org.elasticsearch.xpack.sql.action.SqlQueryRequestBuilder; import 
org.elasticsearch.xpack.sql.action.SqlQueryResponse; @@ -72,7 +71,7 @@ public void testCancellationBeforeFieldCaps() throws InterruptedException { IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); CountDownLatch countDownLatch = new CountDownLatch(1); - SqlQueryRequest request = new SqlQueryRequestBuilder(client, SqlQueryAction.INSTANCE).query("SELECT foo FROM bar").request(); + SqlQueryRequest request = new SqlQueryRequestBuilder(client).query("SELECT foo FROM bar").request(); TransportSqlQueryAction.operation(planExecutor, task, request, new ActionListener<>() { @Override public void onResponse(SqlQueryResponse sqlSearchResponse) { @@ -135,8 +134,7 @@ public void testCancellationBeforeSearch() throws InterruptedException { IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); CountDownLatch countDownLatch = new CountDownLatch(1); - SqlQueryRequest request = new SqlQueryRequestBuilder(client, SqlQueryAction.INSTANCE).query("SELECT foo FROM " + indices[0]) - .request(); + SqlQueryRequest request = new SqlQueryRequestBuilder(client).query("SELECT foo FROM " + indices[0]).request(); TransportSqlQueryAction.operation(planExecutor, task, request, new ActionListener<>() { @Override public void onResponse(SqlQueryResponse sqlSearchResponse) { @@ -196,7 +194,7 @@ public void testCancellationDuringSearch(String query) throws InterruptedExcepti // Emulation of search cancellation ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); - when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client, TransportSearchAction.TYPE).setIndices(indices)); + when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client).setIndices(indices)); doAnswer((Answer) invocation -> { 
@SuppressWarnings("unchecked") SearchRequest request = (SearchRequest) invocation.getArguments()[1]; @@ -224,7 +222,7 @@ public void testCancellationDuringSearch(String query) throws InterruptedExcepti IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); - SqlQueryRequest request = new SqlQueryRequestBuilder(client, SqlQueryAction.INSTANCE).query(query).request(); + SqlQueryRequest request = new SqlQueryRequestBuilder(client).query(query).request(); CountDownLatch countDownLatch = new CountDownLatch(1); TransportSqlQueryAction.operation(planExecutor, task, request, new ActionListener<>() { @Override From 93681cd9406e25d72628cad3d34523ee42b479d7 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Thu, 30 Nov 2023 18:14:19 +0100 Subject: [PATCH 113/263] [Enterprise Search] Add delete connector sync job by id endpoint (#102815) Add a DELETE endpoint for connector sync jobs. --- .../api/connector_sync_job.delete.json | 32 +++++ .../410_connector_sync_job_delete.yml | 37 ++++++ .../xpack/application/EnterpriseSearch.java | 11 +- .../syncjob/ConnectorSyncJobIndexService.java | 64 ++++++++++ .../action/DeleteConnectorSyncJobAction.java | 112 ++++++++++++++++++ .../action/PostConnectorSyncJobAction.java | 3 - .../RestDeleteConnectorSyncJobAction.java | 47 ++++++++ .../RestPostConnectorSyncJobAction.java | 3 +- ...TransportDeleteConnectorSyncJobAction.java | 49 ++++++++ .../ConnectorSyncJobIndexServiceTests.java | 46 +++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 5 + ...ncJobActionRequestBWCSerializingTests.java | 48 ++++++++ .../DeleteConnectorSyncJobActionTests.java | 34 ++++++ ...portDeleteConnectorSyncJobActionTests.java | 76 ++++++++++++ .../xpack/security/operator/Constants.java | 1 + 15 files changed, 562 insertions(+), 6 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json create mode 
100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json new file mode 100644 index 0000000000000..de8ffff861a98 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json @@ -0,0 +1,32 @@ +{ + "connector_sync_job.delete": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Deletes a connector sync job." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}", + "methods": [ + "DELETE" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be deleted." + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml new file mode 100644 index 0000000000000..67fea7fc0cd3c --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/410_connector_sync_job_delete.yml @@ -0,0 +1,37 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Delete Connector Sync Job": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: sync-job-id-to-delete } + - do: + connector_sync_job.delete: + connector_sync_job_id: $sync-job-id-to-delete + + - match: { acknowledged: true } + + +--- +"Delete Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.delete: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + catch: missing + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 970bc38c0145f..26ac6dc9b939d 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -57,8 +57,11 @@ import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -120,6 +123,8 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde public static final String CONNECTOR_API_ENDPOINT = "_connector"; + public static final String CONNECTOR_SYNC_JOB_API_ENDPOINT = CONNECTOR_API_ENDPOINT + "/_sync_job"; + private static final Logger logger = LogManager.getLogger(EnterpriseSearch.class); public static final String FEATURE_NAME = "ent_search"; @@ -181,7 +186,8 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), // SyncJob API - new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class) + new 
ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), + new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class) ) ); } @@ -240,7 +246,8 @@ public List getRestHandlers( new RestUpdateConnectorSchedulingAction(), // SyncJob API - new RestPostConnectorSyncJobAction() + new RestPostConnectorSyncJobAction(), + new RestDeleteConnectorSyncJobAction() ) ); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f259cb1e0a8c0..5deb63fd60669 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -7,9 +7,14 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DelegatingActionListener; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; @@ -17,6 +22,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.application.connector.Connector; 
import org.elasticsearch.xpack.application.connector.ConnectorFiltering; @@ -30,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -156,4 +163,61 @@ public void onFailure(Exception e) { listener.onFailure(e); } } + + /** + * Deletes the {@link ConnectorSyncJob} in the underlying index. + * + * @param connectorSyncJobId The id of the connector sync job object. + * @param listener The action listener to invoke on response/failure. + */ + public void deleteConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + final DeleteRequest deleteRequest = new DeleteRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(connectorSyncJobId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + try { + clientWithOrigin.delete( + deleteRequest, + new DelegatingIndexNotFoundActionListener<>(connectorSyncJobId, listener, (l, deleteResponse) -> { + if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + l.onResponse(deleteResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Listeners that checks failures for IndexNotFoundException, and transforms them in ResourceNotFoundException, + * invoking onFailure on the delegate listener + */ + static class DelegatingIndexNotFoundActionListener extends DelegatingActionListener { + + private final BiConsumer, T> bc; + private final String connectorSyncJobId; + + DelegatingIndexNotFoundActionListener(String connectorSyncJobId, ActionListener delegate, BiConsumer, T> bc) { + super(delegate); + this.bc = bc; + this.connectorSyncJobId = connectorSyncJobId; + } + + @Override + public void onResponse(T t) { + bc.accept(delegate, t); + } + + @Override + public void onFailure(Exception e) { + 
Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof IndexNotFoundException) { + delegate.onFailure(new ResourceNotFoundException("connector sync job [" + connectorSyncJobId + "] not found")); + return; + } + delegate.onFailure(e); + } + } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java new file mode 100644 index 0000000000000..147f8784a8ec7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class DeleteConnectorSyncJobAction extends ActionType { + + public static final DeleteConnectorSyncJobAction INSTANCE = new DeleteConnectorSyncJobAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/delete"; + + private DeleteConnectorSyncJobAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final String EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE = + "[connector_sync_job_id] of the connector sync job cannot be null or empty."; + public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); + + private final String connectorSyncJobId; + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + } + + public Request(String connectorSyncJobId) { + this.connectorSyncJobId = connectorSyncJobId; + } + + @Override + public ActionRequestValidationException validate() { + 
ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, validationException); + } + + return validationException; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(), connectorSyncJobId); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "delete_connector_sync_job_request", + false, + (args) -> new Request((String) args[0]) + ); + + static { + PARSER.declareString(constructorArg(), CONNECTOR_SYNC_JOB_ID_FIELD); + + } + + public static DeleteConnectorSyncJobAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java index 05da4dd798c83..af5ce93548503 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobAction.java @@ -33,7 +33,6 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.application.EnterpriseSearch.CONNECTOR_API_ENDPOINT; public class PostConnectorSyncJobAction extends ActionType { @@ -41,8 +40,6 @@ public class PostConnectorSyncJobAction extends ActionType routes() { + return List.of( + new Route( + RestRequest.Method.DELETE, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + DeleteConnectorSyncJobAction.Request request = new DeleteConnectorSyncJobAction.Request( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM) + ); + return restChannel -> client.execute(DeleteConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java index 2a1b9d15d2451..51cc890418dcb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestPostConnectorSyncJobAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; 
+import org.elasticsearch.xpack.application.EnterpriseSearch; import java.io.IOException; import java.util.List; @@ -27,7 +28,7 @@ public String getName() { @Override public List routes() { - return List.of(new Route(POST, "/" + PostConnectorSyncJobAction.CONNECTOR_SYNC_JOB_API_ENDPOINT)); + return List.of(new Route(POST, "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT)); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobAction.java new file mode 100644 index 0000000000000..25b93f754e9a2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportDeleteConnectorSyncJobAction extends HandledTransportAction< + DeleteConnectorSyncJobAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportDeleteConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + DeleteConnectorSyncJobAction.NAME, + transportService, + actionFilters, + DeleteConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute(Task task, DeleteConnectorSyncJobAction.Request request, ActionListener listener) { + connectorSyncJobIndexService.deleteConnectorSyncJob(request.getConnectorSyncJobId(), listener.map(r -> AcknowledgedResponse.TRUE)); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 309675490ad99..9ac1f4935c6cc 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -7,14 +7,17 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.application.connector.Connector; @@ -151,6 +154,24 @@ public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() ); } + public void testDeleteConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + assertThat(syncJobId, notNullValue()); + + DeleteResponse deleteResponse = awaitDeleteConnectorSyncJob(syncJobId); + + assertThat(deleteResponse.status(), equalTo(RestStatus.OK)); + } + + public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { + expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnectorSyncJob("non-existing-sync-job-id")); + } + private Map getConnectorSyncJobSourceById(String syncJobId) throws ExecutionException, 
InterruptedException, TimeoutException { GetRequest getRequest = new GetRequest(ConnectorSyncJobIndexService.CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId); @@ -183,6 +204,31 @@ public void onFailure(Exception e) { assertTrue("Timeout waiting for put request", requestTimedOut); } + private DeleteResponse awaitDeleteConnectorSyncJob(String connectorSyncJobId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorSyncJobIndexService.deleteConnectorSyncJob(connectorSyncJobId, new ActionListener<>() { + @Override + public void onResponse(DeleteResponse deleteResponse) { + resp.set(deleteResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for delete request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from delete request", resp.get()); + return resp.get(); + } + private PostConnectorSyncJobAction.Response awaitPutConnectorSyncJob(PostConnectorSyncJobAction.Request syncJobRequest) throws Exception { CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 5ce6925ae1cda..099173735edd2 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import 
org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import java.time.Instant; @@ -74,6 +75,10 @@ public static PostConnectorSyncJobAction.Request getRandomPostConnectorSyncJobAc ); } + public static DeleteConnectorSyncJobAction.Request getRandomDeleteConnectorSyncJobActionRequest() { + return new DeleteConnectorSyncJobAction.Request(randomAlphaOfLengthBetween(5, 15)); + } + public static PostConnectorSyncJobAction.Request getRandomPostConnectorSyncJobActionRequest(String connectorId) { return new PostConnectorSyncJobAction.Request( connectorId, diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..c9d2c446e028b --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class DeleteConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + DeleteConnectorSyncJobAction.Request> { + + @Override + protected Writeable.Reader instanceReader() { + return DeleteConnectorSyncJobAction.Request::new; + } + + @Override + protected DeleteConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomDeleteConnectorSyncJobActionRequest(); + } + + @Override + protected DeleteConnectorSyncJobAction.Request mutateInstance(DeleteConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected DeleteConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return DeleteConnectorSyncJobAction.Request.parse(parser); + } + + @Override + protected DeleteConnectorSyncJobAction.Request mutateInstanceForVersion( + DeleteConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return new DeleteConnectorSyncJobAction.Request(instance.getConnectorSyncJobId()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..ee79db86152c6 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class DeleteConnectorSyncJobActionTests extends ESTestCase { + public void testValidate_WhenConnectorSyncJobIdIsPresent_ExpectNoValidationError() { + DeleteConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomDeleteConnectorSyncJobActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + DeleteConnectorSyncJobAction.Request requestWithMissingConnectorId = new DeleteConnectorSyncJobAction.Request(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(DeleteConnectorSyncJobAction.Request.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..9130c44d30da7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportDeleteConnectorSyncJobActionTests.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportDeleteConnectorSyncJobActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportDeleteConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + 
mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportDeleteConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testDeleteConnectorSyncJob_ExpectNoWarnings() throws InterruptedException { + DeleteConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomDeleteConnectorSyncJobActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(DeleteConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for delete request", requestTimedOut); + } + +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index fa74b2986550d..11edc66977e6c 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -128,6 +128,7 @@ public class Constants { "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/sync_job/post", + "cluster:admin/xpack/connector/sync_job/delete", 
"cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From 02c529530a2e667ef951627a685baa3101907c54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Thu, 30 Nov 2023 18:31:25 +0100 Subject: [PATCH 114/263] [Transform] Split comma-separated source index strings into separate indices (#102811) --- docs/changelog/102811.yaml | 6 + .../transform/transforms/SourceConfig.java | 24 +++- .../transforms/SourceConfigTests.java | 108 ++++++++++++------ .../test/multi_cluster/80_transform.yml | 2 +- 4 files changed, 100 insertions(+), 40 deletions(-) create mode 100644 docs/changelog/102811.yaml diff --git a/docs/changelog/102811.yaml b/docs/changelog/102811.yaml new file mode 100644 index 0000000000000..039a337a53e87 --- /dev/null +++ b/docs/changelog/102811.yaml @@ -0,0 +1,6 @@ +pr: 102811 +summary: Split comma-separated source index strings into separate indices +area: Transform +type: bug +issues: + - 99564 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfig.java index 53cbef88695f4..0957d70e9ab5c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfig.java @@ -82,18 +82,30 @@ public SourceConfig(String... 
index) { * @param runtimeMappings Search-time runtime fields that can be used by the transform */ public SourceConfig(String[] index, QueryConfig queryConfig, Map runtimeMappings) { - ExceptionsHelper.requireNonNull(index, INDEX.getPreferredName()); + this.index = extractIndices(ExceptionsHelper.requireNonNull(index, INDEX.getPreferredName())); + this.queryConfig = ExceptionsHelper.requireNonNull(queryConfig, QUERY.getPreferredName()); + this.runtimeMappings = Collections.unmodifiableMap( + ExceptionsHelper.requireNonNull(runtimeMappings, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName()) + ); + } + + /** + * Extracts all index names or index patterns from the given array of strings. + * + * @param index array of indices (may contain comma-separated index names or index patterns) + * @return array of indices without comma-separated index names or index patterns + */ + private static String[] extractIndices(String[] index) { if (index.length == 0) { throw new IllegalArgumentException("must specify at least one index"); } if (Arrays.stream(index).anyMatch(Strings::isNullOrEmpty)) { throw new IllegalArgumentException("all indices need to be non-null and non-empty"); } - this.index = index; - this.queryConfig = ExceptionsHelper.requireNonNull(queryConfig, QUERY.getPreferredName()); - this.runtimeMappings = Collections.unmodifiableMap( - ExceptionsHelper.requireNonNull(runtimeMappings, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName()) - ); + return Arrays.stream(index) + .map(commaSeparatedIndices -> commaSeparatedIndices.split(",")) + .flatMap(Arrays::stream) + .toArray(String[]::new); } public SourceConfig(final StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfigTests.java index a22c2231985e8..a88530904b3d2 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/SourceConfigTests.java @@ -20,7 +20,9 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toMap; +import static org.elasticsearch.xpack.core.transform.transforms.QueryConfigTests.randomQueryConfig; import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -29,11 +31,7 @@ public class SourceConfigTests extends AbstractSerializingTransformTestCase instanceReader() { return SourceConfig::new; } - public void testGetRuntimeMappings_EmptyRuntimeMappings() { - SourceConfig sourceConfig = new SourceConfig( - generateRandomStringArray(10, 10, false, false), - QueryConfigTests.randomQueryConfig(), - emptyMap() + public void testConstructor_NoIndices() { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new SourceConfig(new String[] {}, randomQueryConfig(), randomRuntimeMappings()) ); + assertThat(e.getMessage(), is(equalTo("must specify at least one index"))); + } + + public void testConstructor_EmptyIndex() { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new SourceConfig(new String[] { "" }, randomQueryConfig(), randomRuntimeMappings()) + ); + assertThat(e.getMessage(), is(equalTo("all indices need to be non-null and non-empty"))); + + e = expectThrows( + IllegalArgumentException.class, + () -> new SourceConfig(new String[] { "index1", "" }, randomQueryConfig(), randomRuntimeMappings()) + ); + assertThat(e.getMessage(), is(equalTo("all indices need to be non-null and non-empty"))); + } + + public void testGetIndex() { + SourceConfig sourceConfig = new SourceConfig(new String[] { 
"index1" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1"))); + + sourceConfig = new SourceConfig(new String[] { "index1", "index2", "index3" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1", "index2", "index3"))); + + sourceConfig = new SourceConfig(new String[] { "index1,index2,index3" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1", "index2", "index3"))); + + sourceConfig = new SourceConfig(new String[] { "index1", "index2,index3" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1", "index2", "index3"))); + + sourceConfig = new SourceConfig(new String[] { "index1", "remote2:index2" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1", "remote2:index2"))); + + sourceConfig = new SourceConfig(new String[] { "index1,remote2:index2" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index1", "remote2:index2"))); + + sourceConfig = new SourceConfig(new String[] { "remote1:index1", "index2" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("remote1:index1", "index2"))); + + sourceConfig = new SourceConfig(new String[] { "remote1:index1,index2" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("remote1:index1", "index2"))); + + sourceConfig = new SourceConfig(new String[] { "index*,remote2:index*" }, randomQueryConfig(), randomRuntimeMappings()); + assertThat(sourceConfig.getIndex(), is(arrayContaining("index*", "remote2:index*"))); + + sourceConfig = new SourceConfig(new String[] { "remote1:index*,remote2:index*" }, randomQueryConfig(), randomRuntimeMappings()); + 
assertThat(sourceConfig.getIndex(), is(arrayContaining("remote1:index*", "remote2:index*"))); + } + + public void testGetRuntimeMappings_EmptyRuntimeMappings() { + SourceConfig sourceConfig = new SourceConfig(generateRandomStringArray(10, 10, false, false), randomQueryConfig(), emptyMap()); assertThat(sourceConfig.getRuntimeMappings(), is(anEmptyMap())); assertThat(sourceConfig.getScriptBasedRuntimeMappings(), is(anEmptyMap())); } @@ -111,48 +159,42 @@ public void testGetRuntimeMappings_NonEmptyRuntimeMappings() { put("field-C", singletonMap("script", "some other script")); } }; - SourceConfig sourceConfig = new SourceConfig( - generateRandomStringArray(10, 10, false, false), - QueryConfigTests.randomQueryConfig(), - runtimeMappings - ); + SourceConfig sourceConfig = new SourceConfig(generateRandomStringArray(10, 10, false, false), randomQueryConfig(), runtimeMappings); assertThat(sourceConfig.getRuntimeMappings(), is(equalTo(runtimeMappings))); assertThat(sourceConfig.getScriptBasedRuntimeMappings(), is(equalTo(scriptBasedRuntimeMappings))); } public void testRequiresRemoteCluster() { assertFalse( - new SourceConfig(new String[] { "index1", "index2", "index3" }, QueryConfigTests.randomQueryConfig(), randomRuntimeMappings()) + new SourceConfig(new String[] { "index1", "index2", "index3" }, randomQueryConfig(), randomRuntimeMappings()) + .requiresRemoteCluster() + ); + + assertTrue( + new SourceConfig(new String[] { "index1", "remote2:index2", "index3" }, randomQueryConfig(), randomRuntimeMappings()) .requiresRemoteCluster() ); assertTrue( - new SourceConfig( - new String[] { "index1", "remote2:index2", "index3" }, - QueryConfigTests.randomQueryConfig(), - randomRuntimeMappings() - ).requiresRemoteCluster() + new SourceConfig(new String[] { "index1", "index2", "remote3:index3" }, randomQueryConfig(), randomRuntimeMappings()) + .requiresRemoteCluster() ); assertTrue( - new SourceConfig( - new String[] { "index1", "index2", "remote3:index3" }, - 
QueryConfigTests.randomQueryConfig(), - randomRuntimeMappings() - ).requiresRemoteCluster() + new SourceConfig(new String[] { "index1", "remote2:index2", "remote3:index3" }, randomQueryConfig(), randomRuntimeMappings()) + .requiresRemoteCluster() ); assertTrue( - new SourceConfig( - new String[] { "index1", "remote2:index2", "remote3:index3" }, - QueryConfigTests.randomQueryConfig(), - randomRuntimeMappings() - ).requiresRemoteCluster() + new SourceConfig(new String[] { "remote1:index1" }, randomQueryConfig(), randomRuntimeMappings()).requiresRemoteCluster() + ); + + assertFalse( + new SourceConfig(new String[] { "index1,index2" }, randomQueryConfig(), randomRuntimeMappings()).requiresRemoteCluster() ); assertTrue( - new SourceConfig(new String[] { "remote1:index1" }, QueryConfigTests.randomQueryConfig(), randomRuntimeMappings()) - .requiresRemoteCluster() + new SourceConfig(new String[] { "index1,remote2:index2" }, randomQueryConfig(), randomRuntimeMappings()).requiresRemoteCluster() ); } } diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml index 0fc5b7fa3b3ad..d8ed7fce712b1 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml @@ -244,7 +244,7 @@ teardown: transform_id: "simple-local-remote-transform" body: > { - "source": { "index": ["test_index", "my_remote_cluster:remote_test_index"] }, + "source": { "index": "test_index,my_remote_cluster:remote_test_index" }, "dest": { "index": "simple-local-remote-transform" }, "pivot": { "group_by": { "user": {"terms": {"field": "user"}}}, From 
eaa5889c5c1ce1fd0b02634832e506cbf66470df Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Thu, 30 Nov 2023 13:30:58 -0500 Subject: [PATCH 115/263] Fixing byte quantized search test flakiness Quantized search result scores can be slightly different depending on if they are merged into a single segment, etc. --- .../test/search.vectors/41_knn_search_byte_quantized.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index f700664c43fc1..12fb4d1bbcb1d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -257,7 +257,7 @@ setup: id: "2" body: name: moose.jpg - vector: [-0.5, 100.0, -13, 14.8, -156.0] + vector: [-0.5, 10.0, -13, 14.8, 15.0] - do: index: From d2584ecfa249fa6a0f3c5e98182f10876ec9e283 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Thu, 30 Nov 2023 18:57:48 +0000 Subject: [PATCH 116/263] Bump to lucene 9.9.0 RC2 --- build.gradle | 2 +- gradle/verification-metadata.xml | 48 ++++++++++++++++---------------- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/build.gradle b/build.gradle index 4783868f4e0b2..d10f836db4024 100644 --- a/build.gradle +++ b/build.gradle @@ -198,7 +198,7 @@ subprojects { proj -> repositories { // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC1-rev-92a5e5b02e0e083126c4122f2b7a02426c21a037/lucene/maven" } + maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC2-rev-06070c0dceba07f0d33104192d9ac98ca16fc500/lucene/maven" } } } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9d6f8f21bc74a..72422a28039f9 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2661,122 +2661,122 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + From 43dc74bfaf9ab8565309616b94c598b4accc8362 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Thu, 30 Nov 2023 14:39:41 -0500 Subject: [PATCH 117/263] Allow mismatched sort-by field types if there are no docs to sort (#102779) When searching multiple indices and a field only exists in ONE of the indices, we should allow sorting by that field, regardless of the "unmapped" type provided. 
closes: https://github.com/elastic/elasticsearch/issues/102723 --- docs/changelog/102779.yaml | 5 ++++ .../search/sort/FieldSortIT.java | 17 +++++++++++++- .../action/search/SearchPhaseController.java | 23 ++++++++++++------- 3 files changed, 36 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/102779.yaml diff --git a/docs/changelog/102779.yaml b/docs/changelog/102779.yaml new file mode 100644 index 0000000000000..7bbecb29665bd --- /dev/null +++ b/docs/changelog/102779.yaml @@ -0,0 +1,5 @@ +pr: 102779 +summary: Allow mismatched sort-by field types if there are no docs to sort +area: Search +type: bug +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 2cd68398e211f..21a784c16c8e9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -150,7 +150,7 @@ public void testIssue8226() { ); } - public void testIssue6614() throws ExecutionException, InterruptedException { + public void testIssue6614() throws InterruptedException { List builders = new ArrayList<>(); boolean strictTimeBasedIndices = randomBoolean(); final int numIndices = randomIntBetween(2, 25); // at most 25 days in the month @@ -2137,4 +2137,19 @@ public void testSortMixedFieldTypes() { } } + public void testSortMixedFieldTypesWithNoDocsForOneType() { + assertAcked(prepareCreate("index_long").setMapping("foo", "type=long").get()); + assertAcked(prepareCreate("index_other").setMapping("bar", "type=keyword").get()); + assertAcked(prepareCreate("index_double").setMapping("foo", "type=double").get()); + + prepareIndex("index_long").setId("1").setSource("foo", "123").get(); + prepareIndex("index_long").setId("2").setSource("foo", "124").get(); + prepareIndex("index_other").setId("1").setSource("bar", "124").get(); + 
refresh(); + + assertNoFailures( + prepareSearch("index_long", "index_double", "index_other").addSort(new FieldSortBuilder("foo").unmappedType("boolean")) + .setSize(10) + ); + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 0662e94b519d9..e262003935969 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -241,8 +241,7 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { final TopFieldGroups[] shardTopDocs = results.toArray(new TopFieldGroups[numShards]); mergedTopDocs = TopFieldGroups.merge(sort, from, topN, shardTopDocs, false); } else if (topDocs instanceof TopFieldDocs firstTopDocs) { - checkSameSortTypes(results, firstTopDocs.fields); - final Sort sort = new Sort(firstTopDocs.fields); + final Sort sort = checkSameSortTypes(results, firstTopDocs.fields); final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[numShards]); mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); } else { @@ -252,19 +251,26 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { return mergedTopDocs; } - private static void checkSameSortTypes(Collection results, SortField[] firstSortFields) { - if (results.size() < 2) return; + private static Sort checkSameSortTypes(Collection results, SortField[] firstSortFields) { + Sort sort = new Sort(firstSortFields); + if (results.size() < 2) return sort; - SortField.Type[] firstTypes = new SortField.Type[firstSortFields.length]; + SortField.Type[] firstTypes = null; boolean isFirstResult = true; for (TopDocs topDocs : results) { + // We don't actually merge in empty score docs, so ignore potentially mismatched types if there are no docs + if (topDocs.scoreDocs == null || topDocs.scoreDocs.length == 0) { + continue; + } 
SortField[] curSortFields = ((TopFieldDocs) topDocs).fields; if (isFirstResult) { + sort = new Sort(curSortFields); + firstTypes = new SortField.Type[curSortFields.length]; for (int i = 0; i < curSortFields.length; i++) { - firstTypes[i] = getType(firstSortFields[i]); + firstTypes[i] = getType(curSortFields[i]); if (firstTypes[i] == SortField.Type.CUSTOM) { // for custom types that we can't resolve, we can't do the check - return; + return sort; } } isFirstResult = false; @@ -274,7 +280,7 @@ private static void checkSameSortTypes(Collection results, SortField[] if (curType != firstTypes[i]) { if (curType == SortField.Type.CUSTOM) { // for custom types that we can't resolve, we can't do the check - return; + return sort; } throw new IllegalArgumentException( "Can't sort on field [" @@ -289,6 +295,7 @@ private static void checkSameSortTypes(Collection results, SortField[] } } } + return sort; } private static SortField.Type getType(SortField sortField) { From 134781f9d8da35afa6d5d5fdf71cd736986e995c Mon Sep 17 00:00:00 2001 From: James Baiera Date: Thu, 30 Nov 2023 15:50:07 -0500 Subject: [PATCH 118/263] Fix GetDataStreamsResponseTests failing on non snapshot builds (#102724) --- .../action/GetDataStreamsResponseTests.java | 54 +++++++++++-------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 39f2c06bc95f7..13054379dd666 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -165,17 +165,22 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti is(ManagedBy.LIFECYCLE.displayValue) ); - List failureStoresRepresentation = (List) 
dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); - Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); - assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); - assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); - assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); - assertThat( - failureStoreRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), - is(ManagedBy.LIFECYCLE.displayValue) - ); + if (DataStream.isFailureStoreEnabled()) { + List failureStoresRepresentation = (List) dataStreamMap.get( + DataStream.FAILURE_INDICES_FIELD.getPreferredName() + ); + Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); + assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); + assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat( + failureStoreRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), + is(nullValue()) + ); + assertThat( + failureStoreRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.LIFECYCLE.displayValue) + ); + } } } @@ -258,17 +263,22 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti is(ManagedBy.UNMANAGED.displayValue) ); - List failureStoresRepresentation = (List) dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); - Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); - assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); - assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); - 
assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), is(nullValue())); - assertThat( - failureStoreRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), - is(ManagedBy.UNMANAGED.displayValue) - ); + if (DataStream.isFailureStoreEnabled()) { + List failureStoresRepresentation = (List) dataStreamMap.get( + DataStream.FAILURE_INDICES_FIELD.getPreferredName() + ); + Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); + assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); + assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); + assertThat( + failureStoreRepresentation.get(Response.DataStreamInfo.ILM_POLICY_FIELD.getPreferredName()), + is(nullValue()) + ); + assertThat( + failureStoreRepresentation.get(Response.DataStreamInfo.MANAGED_BY.getPreferredName()), + is(ManagedBy.UNMANAGED.displayValue) + ); + } } } } From 38867b7fb6fe88210064e19cc80d98df0ddce2f7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 30 Nov 2023 22:13:25 +0100 Subject: [PATCH 119/263] Remove outdated BwC code from search codebase (#102699) Cleaning these up for the search codebase since it hasn't been done in a while. 
--- .../mapping/put/PutMappingRequest.java | 8 +-- .../settings/put/UpdateSettingsRequest.java | 8 +-- .../action/search/SearchRequest.java | 33 +++-------- .../action/search/SearchResponse.java | 10 +--- .../search/CanMatchShardResponse.java | 11 +--- .../elasticsearch/search/DocValueFormat.java | 10 +--- .../CompositeValuesSourceBuilder.java | 9 +-- .../CompositeValuesSourceParserHelper.java | 10 ---- .../DateHistogramValuesSourceBuilder.java | 9 +-- .../bucket/composite/InternalComposite.java | 18 ++---- .../geogrid/GeoGridAggregationBuilder.java | 10 +--- .../DateHistogramAggregationBuilder.java | 8 +-- .../HistogramAggregationBuilder.java | 28 ++-------- .../bucket/terms/IncludeExclude.java | 11 ---- .../bucket/terms/InternalMappedTerms.java | 23 +++----- .../bucket/terms/InternalTerms.java | 11 +--- ...AbstractPercentilesAggregationBuilder.java | 28 +--------- .../metrics/InternalGeoCentroid.java | 31 +--------- .../metrics/InternalScriptedMetric.java | 24 +------- .../metrics/PercentilesConfig.java | 13 ----- .../metrics/TopHitsAggregationBuilder.java | 14 ++--- .../support/MultiValuesSourceFieldConfig.java | 37 +++--------- .../search/builder/SearchSourceBuilder.java | 32 +++-------- .../search/dfs/DfsSearchResult.java | 8 +-- .../search/fetch/FetchSearchResult.java | 11 +--- .../search/fetch/ShardFetchSearchRequest.java | 21 ++----- .../search/fetch/subphase/FieldAndFormat.java | 11 +--- .../highlight/AbstractHighlighterBuilder.java | 8 +-- .../search/internal/ShardSearchContextId.java | 22 ++------ .../search/internal/ShardSearchRequest.java | 56 ++++--------------- .../search/profile/ProfileResult.java | 11 +--- .../search/profile/SearchProfileResults.java | 15 +---- .../search/query/QuerySearchRequest.java | 11 +--- .../search/query/QuerySearchResult.java | 22 ++------ .../search/slice/SliceBuilder.java | 20 ++----- .../search/sort/FieldSortBuilder.java | 20 ++----- .../elasticsearch/search/sort/SortValue.java | 8 --- 
.../aggregations/bucket/GeoHashGridTests.java | 38 ------------- .../aggregations/bucket/GeoTileGridTests.java | 38 ------------- .../metrics/InternalScriptedMetricTests.java | 37 ------------ .../builder/SearchSourceBuilderTests.java | 13 ----- .../search/sort/SortValueTests.java | 17 ------ 42 files changed, 123 insertions(+), 660 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index eb758a40da5fe..45532d8024f87 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -88,9 +88,7 @@ public PutMappingRequest(StreamInput in) throws IOException { source = in.readString(); concreteIndex = in.readOptionalWriteable(Index::new); origin = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_9_0)) { - writeIndexOnly = in.readBoolean(); - } + writeIndexOnly = in.readBoolean(); } public PutMappingRequest() {} @@ -317,8 +315,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(source); out.writeOptionalWriteable(concreteIndex); out.writeOptionalString(origin); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_9_0)) { - out.writeBoolean(writeIndexOnly); - } + out.writeBoolean(writeIndexOnly); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index 013e568eff7c9..fba395748f476 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -55,9 +55,7 @@ public 
UpdateSettingsRequest(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); settings = readSettingsFromStream(in); preserveExisting = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - origin = in.readString(); - } + origin = in.readString(); if (in.getTransportVersion().onOrAfter(TransportVersions.UPDATE_NON_DYNAMIC_SETTINGS_ADDED)) { reopen = in.readBoolean(); } @@ -200,9 +198,7 @@ public void writeTo(StreamOutput out) throws IOException { indicesOptions.writeIndicesOptions(out); settings.writeTo(out); out.writeBoolean(preserveExisting); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - out.writeString(origin); - } + out.writeString(origin); if (out.getTransportVersion().onOrAfter(TransportVersions.UPDATE_NON_DYNAMIC_SETTINGS_ADDED)) { out.writeBoolean(reopen); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 372890778d960..7ac8c4d5299d4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.search; -import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; @@ -265,15 +264,13 @@ public SearchRequest(StreamInput in) throws IOException { finalReduce = true; } ccsMinimizeRoundtrips = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0) && in.readBoolean()) { + if (in.readBoolean()) { minCompatibleShardNode = Version.readVersion(in); } else { minCompatibleShardNode = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - waitForCheckpoints = in.readMap(StreamInput::readLongArray); - waitForCheckpointsTimeout = 
in.readTimeValue(); - } + waitForCheckpoints = in.readMap(StreamInput::readLongArray); + waitForCheckpointsTimeout = in.readTimeValue(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { forceSyntheticSource = in.readBoolean(); } else { @@ -306,26 +303,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(finalReduce); } out.writeBoolean(ccsMinimizeRoundtrips); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - out.writeBoolean(minCompatibleShardNode != null); - if (minCompatibleShardNode != null) { - Version.writeVersion(minCompatibleShardNode, out); - } - } - TransportVersion waitForCheckpointsVersion = TransportVersions.V_7_16_0; - if (out.getTransportVersion().onOrAfter(waitForCheckpointsVersion)) { - out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); - out.writeTimeValue(waitForCheckpointsTimeout); - } else if (waitForCheckpoints.isEmpty() == false) { - throw new IllegalArgumentException( - "Remote transport version [" - + out.getTransportVersion() - + " incompatible with " - + "wait_for_checkpoints. All nodes must use transport version [" - + waitForCheckpointsVersion - + "] or greater." 
- ); + out.writeBoolean(minCompatibleShardNode != null); + if (minCompatibleShardNode != null) { + Version.writeVersion(minCompatibleShardNode, out); } + out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); + out.writeTimeValue(waitForCheckpointsTimeout); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { out.writeBoolean(forceSyntheticSource); } else { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 56b58cd8ced6c..9ff0f6273171b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -95,11 +95,7 @@ public SearchResponse(StreamInput in) throws IOException { scrollId = in.readOptionalString(); tookInMillis = in.readVLong(); skippedShards = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - pointInTimeId = in.readOptionalString(); - } else { - pointInTimeId = null; - } + pointInTimeId = in.readOptionalString(); } public SearchResponse( @@ -436,9 +432,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(scrollId); out.writeVLong(tookInMillis); out.writeVInt(skippedShards); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalString(pointInTimeId); - } + out.writeOptionalString(pointInTimeId); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/CanMatchShardResponse.java b/server/src/main/java/org/elasticsearch/search/CanMatchShardResponse.java index a42fa9d8eec36..d2b0cc3d7c189 100644 --- a/server/src/main/java/org/elasticsearch/search/CanMatchShardResponse.java +++ b/server/src/main/java/org/elasticsearch/search/CanMatchShardResponse.java @@ -8,7 +8,6 @@ package org.elasticsearch.search; -import org.elasticsearch.TransportVersions; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.sort.MinAndMax; @@ -25,11 +24,7 @@ public final class CanMatchShardResponse extends SearchPhaseResult { public CanMatchShardResponse(StreamInput in) throws IOException { super(in); this.canMatch = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - estimatedMinAndMax = in.readOptionalWriteable(MinAndMax::new); - } else { - estimatedMinAndMax = null; - } + estimatedMinAndMax = in.readOptionalWriteable(MinAndMax::new); } public CanMatchShardResponse(boolean canMatch, MinAndMax estimatedMinAndMax) { @@ -40,9 +35,7 @@ public CanMatchShardResponse(boolean canMatch, MinAndMax estimatedMinAndMax) @Override public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(canMatch); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeOptionalWriteable(estimatedMinAndMax); - } + out.writeOptionalWriteable(estimatedMinAndMax); } public boolean canMatch() { diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 013c3587e84f5..7f08089d0c768 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -248,11 +248,7 @@ public DateTime(StreamInput in) throws IOException { */ in.readBoolean(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - this.formatSortValues = in.readBoolean(); - } else { - this.formatSortValues = false; - } + this.formatSortValues = in.readBoolean(); } @Override @@ -272,9 +268,7 @@ public void writeTo(StreamOutput out) throws IOException { */ out.writeBoolean(false); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeBoolean(formatSortValues); - } + out.writeBoolean(formatSortValues); } public 
DateMathParser getDateMathParser() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java index af7e450ac8bda..c26cbeead3dcd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket.composite; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -56,9 +55,7 @@ public abstract class CompositeValuesSourceBuilder builder, StreamOutput } else if (builder.getClass() == HistogramValuesSourceBuilder.class) { code = 2; } else if (builder.getClass() == GeoTileGridValuesSourceBuilder.class) { - if (out.getTransportVersion().before(TransportVersions.V_7_5_0)) { - throw new IOException( - "Attempting to serialize [" - + builder.getClass().getSimpleName() - + "] to a stream with unsupported version [" - + out.getTransportVersion() - + "]" - ); - } code = 3; } else { throw new IOException("invalid builder type: " + builder.getClass().getSimpleName()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index d31d3a18b3567..7a6668f39e399 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -9,7 +9,6 @@ package 
org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.index.IndexReader; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -105,18 +104,14 @@ protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException { super(in); dateHistogramInterval = new DateIntervalWrapper(in); timeZone = in.readOptionalZoneId(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - offset = in.readLong(); - } + offset = in.readLong(); } @Override protected void innerWriteTo(StreamOutput out) throws IOException { dateHistogramInterval.writeTo(out); out.writeOptionalZoneId(timeZone); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeLong(offset); - } + out.writeLong(offset); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index f2c601e412f92..a302c912e312a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -100,15 +99,10 @@ public InternalComposite(StreamInput in) throws IOException { formats.add(in.readNamedWriteable(DocValueFormat.class)); } this.reverseMuls = in.readIntArray(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - this.missingOrders = 
in.readArray(MissingOrder::readFromStream, MissingOrder[]::new); - } else { - this.missingOrders = new MissingOrder[reverseMuls.length]; - Arrays.fill(missingOrders, MissingOrder.DEFAULT); - } + this.missingOrders = in.readArray(MissingOrder::readFromStream, MissingOrder[]::new); this.buckets = in.readCollectionAsList((input) -> new InternalBucket(input, sourceNames, formats, reverseMuls, missingOrders)); this.afterKey = in.readOptionalWriteable(CompositeKey::new); - this.earlyTerminated = in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0) ? in.readBoolean() : false; + this.earlyTerminated = in.readBoolean(); } @Override @@ -119,14 +113,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeNamedWriteable(format); } out.writeIntArray(reverseMuls); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeArray(missingOrders); - } + out.writeArray(missingOrders); out.writeCollection(buckets); out.writeOptionalWriteable(afterKey); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeBoolean(earlyTerminated); - } + out.writeBoolean(earlyTerminated); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 8bc1e3d17642a..624a87249ac53 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; @@ -91,9 +90,8 @@ 
public GeoGridAggregationBuilder(StreamInput in) throws IOException { precision = in.readVInt(); requiredSize = in.readVInt(); shardSize = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - geoBoundingBox = new GeoBoundingBox(in); - } + geoBoundingBox = new GeoBoundingBox(in); + } @Override @@ -111,9 +109,7 @@ protected void innerWriteTo(StreamOutput out) throws IOException { out.writeVInt(precision); out.writeVInt(requiredSize); out.writeVInt(shardSize); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - geoBoundingBox.writeTo(out); - } + geoBoundingBox.writeTo(out); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index 20223f6f92524..4f94e2061caa1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -157,9 +157,7 @@ public DateHistogramAggregationBuilder(StreamInput in) throws IOException { dateHistogramInterval = new DateIntervalWrapper(in); offset = in.readLong(); extendedBounds = in.readOptionalWriteable(LongBounds::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - hardBounds = in.readOptionalWriteable(LongBounds::new); - } + hardBounds = in.readOptionalWriteable(LongBounds::new); } @Override @@ -180,9 +178,7 @@ protected void innerWriteTo(StreamOutput out) throws IOException { dateHistogramInterval.writeTo(out); out.writeLong(offset); out.writeOptionalWriteable(extendedBounds); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(hardBounds); - } + out.writeOptionalWriteable(hardBounds); } /** diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java index fa2c5dc219cba..ab188eaaf6257 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregationBuilder.java @@ -146,18 +146,8 @@ public HistogramAggregationBuilder(StreamInput in) throws IOException { minDocCount = in.readVLong(); interval = in.readDouble(); offset = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - extendedBounds = in.readOptionalWriteable(DoubleBounds::new); - hardBounds = in.readOptionalWriteable(DoubleBounds::new); - } else { - double minBound = in.readDouble(); - double maxBound = in.readDouble(); - if (minBound == Double.POSITIVE_INFINITY && maxBound == Double.NEGATIVE_INFINITY) { - extendedBounds = null; - } else { - extendedBounds = new DoubleBounds(minBound, maxBound); - } - } + extendedBounds = in.readOptionalWriteable(DoubleBounds::new); + hardBounds = in.readOptionalWriteable(DoubleBounds::new); } @Override @@ -167,18 +157,8 @@ protected void innerWriteTo(StreamOutput out) throws IOException { out.writeVLong(minDocCount); out.writeDouble(interval); out.writeDouble(offset); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(extendedBounds); - out.writeOptionalWriteable(hardBounds); - } else { - if (extendedBounds != null) { - out.writeDouble(extendedBounds.getMin()); - out.writeDouble(extendedBounds.getMax()); - } else { - out.writeDouble(Double.POSITIVE_INFINITY); - out.writeDouble(Double.NEGATIVE_INFINITY); - } - } + out.writeOptionalWriteable(extendedBounds); + out.writeOptionalWriteable(hardBounds); } /** Get the current interval that is set on this builder. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 524c648215345..c941e299cad40 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -22,7 +22,6 @@ import org.apache.lucene.util.automaton.RegExp; import org.apache.lucene.util.hppc.BitMixer; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -387,13 +386,6 @@ public IncludeExclude(StreamInput in) throws IOException { include = includeString == null ? null : new RegExp(includeString); String excludeString = in.readOptionalString(); exclude = excludeString == null ? null : new RegExp(excludeString); - if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) { - incZeroBasedPartition = 0; - incNumPartitions = 0; - includeValues = null; - excludeValues = null; - return; - } } else { include = null; exclude = null; @@ -427,9 +419,6 @@ public void writeTo(StreamOutput out) throws IOException { if (regexBased) { out.writeOptionalString(include == null ? null : include.getOriginalString()); out.writeOptionalString(exclude == null ? 
null : exclude.getOriginalString()); - if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) { - return; - } } boolean hasIncludes = includeValues != null; out.writeBoolean(hasIncludes); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index fe41df12feafa..36107aeb7b7a9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket.terms; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -63,14 +62,10 @@ protected InternalMappedTerms( */ protected InternalMappedTerms(StreamInput in, Bucket.Reader bucketReader) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_15_0)) { - if (in.readBoolean()) { - docCountError = in.readZLong(); - } else { - docCountError = null; - } - } else { + if (in.readBoolean()) { docCountError = in.readZLong(); + } else { + docCountError = null; } format = in.readNamedWriteable(DocValueFormat.class); shardSize = readSize(in); @@ -81,15 +76,11 @@ protected InternalMappedTerms(StreamInput in, Bucket.Reader bucketReader) thr @Override protected final void writeTermTypeInfoTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_15_0)) { - if (docCountError != null) { - out.writeBoolean(true); - out.writeZLong(docCountError); - } else { - out.writeBoolean(false); - } + if (docCountError != null) { + out.writeBoolean(true); + out.writeZLong(docCountError); } else { - out.writeZLong(docCountError == null ? 
0 : docCountError); + out.writeBoolean(false); } out.writeNamedWriteable(format); writeSize(shardSize, out); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 1d32251ffc33a..4cd963296ab12 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations.bucket.terms; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -213,20 +212,14 @@ protected InternalTerms( protected InternalTerms(StreamInput in) throws IOException { super(in); reduceOrder = InternalOrder.Streams.readOrder(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - order = InternalOrder.Streams.readOrder(in); - } else { - order = reduceOrder; - } + order = InternalOrder.Streams.readOrder(in); requiredSize = readSize(in); minDocCount = in.readVLong(); } @Override protected final void doWriteTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - reduceOrder.writeTo(out); - } + reduceOrder.writeTo(out); order.writeTo(out); writeSize(requiredSize, out); out.writeVLong(minDocCount); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 53b8c5ce11e9f..a10033e34ff50 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -146,14 +145,7 @@ public static > ConstructingO this.valuesField = valuesField; values = in.readDoubleArray(); keyed = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_8_0)) { - percentilesConfig = (PercentilesConfig) in.readOptionalWriteable((Reader) PercentilesConfig::fromStream); - } else { - int numberOfSignificantValueDigits = in.readVInt(); - double compression = in.readDouble(); - PercentilesMethod method = PercentilesMethod.readFromStream(in); - percentilesConfig = PercentilesConfig.fromLegacy(method, compression, numberOfSignificantValueDigits); - } + percentilesConfig = (PercentilesConfig) in.readOptionalWriteable((Reader) PercentilesConfig::fromStream); } @Override @@ -165,23 +157,7 @@ public boolean supportsSampling() { protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDoubleArray(values); out.writeBoolean(keyed); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_8_0)) { - out.writeOptionalWriteable(percentilesConfig); - } else { - // Legacy method serialized both SigFigs and compression, even though we only need one. So we need - // to serialize the default for the unused method - int numberOfSignificantValueDigits = percentilesConfig.getMethod().equals(PercentilesMethod.HDR) - ? ((PercentilesConfig.Hdr) percentilesConfig).getNumberOfSignificantValueDigits() - : PercentilesConfig.Hdr.DEFAULT_NUMBER_SIG_FIGS; - - double compression = percentilesConfig.getMethod().equals(PercentilesMethod.TDIGEST) - ? 
((PercentilesConfig.TDigest) percentilesConfig).getCompression() - : PercentilesConfig.TDigest.DEFAULT_COMPRESSION; - - out.writeVInt(numberOfSignificantValueDigits); - out.writeDouble(compression); - percentilesConfig.getMethod().writeTo(out); - } + out.writeOptionalWriteable(percentilesConfig); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java index 15f10b3a6d110..20da8b18d8faf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java @@ -8,8 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.apache.lucene.geo.GeoEncodingUtils; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.io.stream.StreamInput; @@ -26,20 +24,6 @@ */ public class InternalGeoCentroid extends InternalCentroid implements GeoCentroid { - private static long encodeLatLon(double lat, double lon) { - return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) | Integer.toUnsignedLong( - GeoEncodingUtils.encodeLongitude(lon) - ); - } - - private static double decodeLatitude(long encodedLatLon) { - return GeoEncodingUtils.decodeLatitude((int) (encodedLatLon >>> 32)); - } - - private static double decodeLongitude(long encodedLatLon) { - return GeoEncodingUtils.decodeLongitude((int) (encodedLatLon & 0xFFFFFFFFL)); - } - public InternalGeoCentroid(String name, SpatialPoint centroid, long count, Map metadata) { super( name, @@ -64,22 +48,13 @@ public static InternalGeoCentroid empty(String name, Map metadat @Override protected GeoPoint centroidFromStream(StreamInput in) throws IOException { - if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { - return new GeoPoint(in.readDouble(), in.readDouble()); - } else { - final long hash = in.readLong(); - return new GeoPoint(decodeLatitude(hash), decodeLongitude(hash)); - } + return new GeoPoint(in.readDouble(), in.readDouble()); } @Override protected void centroidToStream(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { - out.writeDouble(centroid.getY()); - out.writeDouble(centroid.getX()); - } else { - out.writeLong(encodeLatLon(centroid.getY(), centroid.getX())); - } + out.writeDouble(centroid.getY()); + out.writeDouble(centroid.getX()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java index 1580c9ce22a29..cc1ca641750e7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; @@ -27,8 +26,6 @@ import java.util.Map; import java.util.Objects; -import static java.util.Collections.singletonList; - public class InternalScriptedMetric extends InternalAggregation implements ScriptedMetric { final Script reduceScript; private final List aggregations; @@ -45,30 +42,13 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip public InternalScriptedMetric(StreamInput in) throws IOException { super(in); reduceScript = in.readOptionalWriteable(Script::new); - if (in.getTransportVersion().before(TransportVersions.V_7_8_0)) { - 
aggregations = singletonList(in.readGenericValue()); - } else { - aggregations = in.readCollectionAsList(StreamInput::readGenericValue); - } + aggregations = in.readCollectionAsList(StreamInput::readGenericValue); } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(reduceScript); - if (out.getTransportVersion().before(TransportVersions.V_7_8_0)) { - if (aggregations.size() > 1) { - /* - * If aggregations has more than one entry we're trying to - * serialize an unreduced aggregation. This *should* only - * happen when we're returning a scripted_metric over cross - * cluster search. - */ - throw new IllegalArgumentException("scripted_metric doesn't support cross cluster search until 7.8.0"); - } - out.writeGenericValue(aggregations.get(0)); - } else { - out.writeCollection(aggregations, StreamOutput::writeGenericValue); - } + out.writeCollection(aggregations, StreamOutput::writeGenericValue); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java index b8402208673d4..d946ce3e14ea1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java @@ -46,19 +46,6 @@ public static PercentilesConfig fromStream(StreamInput in) throws IOException { return method.configFromStream(in); } - /** - * Deprecated: construct a {@link PercentilesConfig} directly instead - */ - @Deprecated - public static PercentilesConfig fromLegacy(PercentilesMethod method, double compression, int numberOfSignificantDigits) { - if (method.equals(PercentilesMethod.TDIGEST)) { - return new TDigest(compression); - } else if (method.equals(PercentilesMethod.HDR)) { - return new Hdr(numberOfSignificantDigits); - } - throw new IllegalArgumentException("Unsupported 
percentiles algorithm [" + method + "]"); - } - public PercentilesMethod getMethod() { return method; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 00db45e2d06b4..c5dca1271c891 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -130,10 +130,8 @@ public TopHitsAggregationBuilder(StreamInput in) throws IOException { trackScores = in.readBoolean(); version = in.readBoolean(); seqNoAndPrimaryTerm = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - if (in.readBoolean()) { - fetchFields = in.readCollectionAsList(FieldAndFormat::new); - } + if (in.readBoolean()) { + fetchFields = in.readCollectionAsList(FieldAndFormat::new); } } @@ -163,11 +161,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBoolean(trackScores); out.writeBoolean(version); out.writeBoolean(seqNoAndPrimaryTerm); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeBoolean(fetchFields != null); - if (fetchFields != null) { - out.writeCollection(fetchFields); - } + out.writeBoolean(fetchFields != null); + if (fetchFields != null) { + out.writeCollection(fetchFields); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java index 42330b995ae94..924a5c6fa16a2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceFieldConfig.java @@ -161,26 +161,13 @@ protected 
MultiValuesSourceFieldConfig( } public MultiValuesSourceFieldConfig(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - this.fieldName = in.readOptionalString(); - } else { - this.fieldName = in.readString(); - } + this.fieldName = in.readOptionalString(); this.missing = in.readGenericValue(); this.script = in.readOptionalWriteable(Script::new); this.timeZone = in.readOptionalZoneId(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_8_0)) { - this.filter = in.readOptionalNamedWriteable(QueryBuilder.class); - } else { - this.filter = null; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - this.userValueTypeHint = in.readOptionalWriteable(ValueType::readFromStream); - this.format = in.readOptionalString(); - } else { - this.userValueTypeHint = null; - this.format = null; - } + this.filter = in.readOptionalNamedWriteable(QueryBuilder.class); + this.userValueTypeHint = in.readOptionalWriteable(ValueType::readFromStream); + this.format = in.readOptionalString(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { this.includeExclude = in.readOptionalWriteable(IncludeExclude::new); } else { @@ -222,21 +209,13 @@ public IncludeExclude getIncludeExclude() { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeOptionalString(fieldName); - } else { - out.writeString(fieldName); - } + out.writeOptionalString(fieldName); out.writeGenericValue(missing); out.writeOptionalWriteable(script); out.writeOptionalZoneId(timeZone); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_8_0)) { - out.writeOptionalNamedWriteable(filter); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - out.writeOptionalWriteable(userValueTypeHint); - out.writeOptionalString(format); - } + out.writeOptionalNamedWriteable(filter); + 
out.writeOptionalWriteable(userValueTypeHint); + out.writeOptionalString(format); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { out.writeOptionalWriteable(includeExclude); } diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 069aa6ff41ae1..4fd20387004aa 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -256,15 +256,11 @@ public SearchSourceBuilder(StreamInput in) throws IOException { sliceBuilder = in.readOptionalWriteable(SliceBuilder::new); collapse = in.readOptionalWriteable(CollapseBuilder::new); trackTotalHitsUpTo = in.readOptionalInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - if (in.readBoolean()) { - fetchFields = in.readCollectionAsList(FieldAndFormat::new); - } - pointInTimeBuilder = in.readOptionalWriteable(PointInTimeBuilder::new); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - runtimeMappings = in.readMap(); + if (in.readBoolean()) { + fetchFields = in.readCollectionAsList(FieldAndFormat::new); } + pointInTimeBuilder = in.readOptionalWriteable(PointInTimeBuilder::new); + runtimeMappings = in.readMap(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { if (in.getTransportVersion().before(TransportVersions.V_8_7_0)) { KnnSearchBuilder searchBuilder = in.readOptionalWriteable(KnnSearchBuilder::new); @@ -333,22 +329,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(sliceBuilder); out.writeOptionalWriteable(collapse); out.writeOptionalInt(trackTotalHitsUpTo); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeBoolean(fetchFields != null); - if (fetchFields != null) { - out.writeCollection(fetchFields); - } - 
out.writeOptionalWriteable(pointInTimeBuilder); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - out.writeGenericMap(runtimeMappings); - } else { - if (false == runtimeMappings.isEmpty()) { - throw new IllegalArgumentException( - "Versions before 7110099 don't support [runtime_mappings] and search was sent to [" + out.getTransportVersion() + "]" - ); - } + out.writeBoolean(fetchFields != null); + if (fetchFields != null) { + out.writeCollection(fetchFields); } + out.writeOptionalWriteable(pointInTimeBuilder); + out.writeGenericMap(runtimeMappings); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { if (out.getTransportVersion().before(TransportVersions.V_8_7_0)) { if (knnSearch.size() > 1) { diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index 3269de91da606..ea662e37ffa9a 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -53,9 +53,7 @@ public DfsSearchResult(StreamInput in) throws IOException { fieldStatistics = readFieldStats(in); maxDoc = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); - } + setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { knnResults = in.readOptionalCollectionAsList(DfsKnnResults::new); @@ -135,9 +133,7 @@ public void writeTo(StreamOutput out) throws IOException { writeTermStats(out, termStatistics); writeFieldStats(out, fieldStatistics); out.writeVInt(maxDoc); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(getShardSearchRequest()); - } + 
out.writeOptionalWriteable(getShardSearchRequest()); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) { out.writeOptionalCollection(knnResults); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 725b723b5155f..aa5c1f2cbd992 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.fetch; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.AbstractRefCounted; @@ -44,20 +43,14 @@ public FetchSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); hits = new SearchHits(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - profileResult = in.readOptionalWriteable(ProfileResult::new); - } else { - profileResult = null; - } + profileResult = in.readOptionalWriteable(ProfileResult::new); } @Override public void writeTo(StreamOutput out) throws IOException { contextId.writeTo(out); hits.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeOptionalWriteable(profileResult); - } + out.writeOptionalWriteable(profileResult); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java index e6fc229e1e648..a0f960dc4aaad 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/ShardFetchSearchRequest.java @@ -9,7 +9,6 @@ package 
org.elasticsearch.search.fetch; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; @@ -53,26 +52,18 @@ public ShardFetchSearchRequest( public ShardFetchSearchRequest(StreamInput in) throws IOException { super(in); originalIndices = OriginalIndices.readOriginalIndices(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); - rescoreDocIds = new RescoreDocIds(in); - aggregatedDfs = in.readOptionalWriteable(AggregatedDfs::new); - } else { - shardSearchRequest = null; - rescoreDocIds = RescoreDocIds.EMPTY; - aggregatedDfs = null; - } + shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); + rescoreDocIds = new RescoreDocIds(in); + aggregatedDfs = in.readOptionalWriteable(AggregatedDfs::new); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); OriginalIndices.writeOriginalIndices(originalIndices, out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(shardSearchRequest); - rescoreDocIds.writeTo(out); - out.writeOptionalWriteable(aggregatedDfs); - } + out.writeOptionalWriteable(shardSearchRequest); + rescoreDocIds.writeTo(out); + out.writeOptionalWriteable(aggregatedDfs); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java index 82b16a9aad03a..3d4d8ba4caa71 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FieldAndFormat.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.fetch.subphase; -import org.elasticsearch.TransportVersions; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -133,20 +132,14 @@ public FieldAndFormat(String field, @Nullable String format, @Nullable Boolean i public FieldAndFormat(StreamInput in) throws IOException { this.field = in.readString(); format = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - this.includeUnmapped = in.readOptionalBoolean(); - } else { - this.includeUnmapped = null; - } + this.includeUnmapped = in.readOptionalBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(field); out.writeOptionalString(format); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - out.writeOptionalBoolean(this.includeUnmapped); - } + out.writeOptionalBoolean(this.includeUnmapped); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index 36cda88a063ec..5a26191b8eeec 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -161,9 +161,7 @@ protected AbstractHighlighterBuilder(StreamInput in) throws IOException { options(in.readMap()); } requireFieldMatch(in.readOptionalBoolean()); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - maxAnalyzedOffset(in.readOptionalInt()); - } + maxAnalyzedOffset(in.readOptionalInt()); } /** @@ -207,9 +205,7 @@ public final void writeTo(StreamOutput out) throws IOException { out.writeGenericMap(options); } out.writeOptionalBoolean(requireFieldMatch); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - 
out.writeOptionalInt(maxAnalyzedOffset); - } + out.writeOptionalInt(maxAnalyzedOffset); doWriteTo(out); } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java index d4888e79acae2..37f18849f74f2 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.internal; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -34,27 +33,16 @@ public ShardSearchContextId(String sessionId, long id, String searcherId) { public ShardSearchContextId(StreamInput in) throws IOException { this.id = in.readLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { - this.sessionId = in.readString(); - } else { - this.sessionId = ""; - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - this.searcherId = in.readOptionalString(); - } else { - this.searcherId = null; - } + this.sessionId = in.readString(); + this.searcherId = in.readOptionalString(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeLong(id); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { - out.writeString(sessionId); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - out.writeOptionalString(searcherId); - } + out.writeString(sessionId); + out.writeOptionalString(searcherId); + } public String getSessionId() { diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index fe9cfdc87695e..01988003f4dd0 100644 --- 
a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -276,7 +276,7 @@ public ShardSearchRequest(StreamInput in) throws IOException { super(in); shardId = new ShardId(in); searchType = SearchType.fromId(in.readByte()); - shardRequestIndex = in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0) ? in.readVInt() : -1; + shardRequestIndex = in.readVInt(); numberOfShards = in.readVInt(); scroll = in.readOptionalWriteable(Scroll::new); source = in.readOptionalWriteable(SearchSourceBuilder::new); @@ -319,30 +319,14 @@ public ShardSearchRequest(StreamInput in) throws IOException { requestCache = in.readOptionalBoolean(); clusterAlias = in.readOptionalString(); allowPartialSearchResults = in.readBoolean(); - if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) { - in.readStringArray(); - in.readOptionalString(); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { - canReturnNullResponseIfMatchNoDocs = in.readBoolean(); - bottomSortValues = in.readOptionalWriteable(SearchSortValuesAndFormats::new); - readerId = in.readOptionalWriteable(ShardSearchContextId::new); - keepAlive = in.readOptionalTimeValue(); - } else { - canReturnNullResponseIfMatchNoDocs = false; - bottomSortValues = null; - readerId = null; - keepAlive = null; - } + canReturnNullResponseIfMatchNoDocs = in.readBoolean(); + bottomSortValues = in.readOptionalWriteable(SearchSortValuesAndFormats::new); + readerId = in.readOptionalWriteable(ShardSearchContextId::new); + keepAlive = in.readOptionalTimeValue(); assert keepAlive == null || readerId != null : "readerId: null keepAlive: " + keepAlive; channelVersion = TransportVersion.min(TransportVersion.readVersion(in), in.getTransportVersion()); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - waitForCheckpoint = in.readLong(); - waitForCheckpointsTimeout = in.readTimeValue(); - } else 
{ - waitForCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; - waitForCheckpointsTimeout = SearchService.NO_TIMEOUT; - } + waitForCheckpoint = in.readLong(); + waitForCheckpointsTimeout = in.readTimeValue(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { forceSyntheticSource = in.readBoolean(); } else { @@ -367,9 +351,7 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce shardId.writeTo(out); out.writeByte(searchType.id()); if (asKey == false) { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - out.writeVInt(shardRequestIndex); - } + out.writeVInt(shardRequestIndex); out.writeVInt(numberOfShards); } out.writeOptionalWriteable(scroll); @@ -400,31 +382,15 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce out.writeOptionalBoolean(requestCache); out.writeOptionalString(clusterAlias); out.writeBoolean(allowPartialSearchResults); - if (asKey == false && out.getTransportVersion().before(TransportVersions.V_7_11_0)) { - out.writeStringArray(Strings.EMPTY_ARRAY); - out.writeOptionalString(null); - } - if (asKey == false && out.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { + if (asKey == false) { out.writeBoolean(canReturnNullResponseIfMatchNoDocs); out.writeOptionalWriteable(bottomSortValues); out.writeOptionalWriteable(readerId); out.writeOptionalTimeValue(keepAlive); } TransportVersion.writeVersion(channelVersion, out); - TransportVersion waitForCheckpointsVersion = TransportVersions.V_7_16_0; - if (out.getTransportVersion().onOrAfter(waitForCheckpointsVersion)) { - out.writeLong(waitForCheckpoint); - out.writeTimeValue(waitForCheckpointsTimeout); - } else if (waitForCheckpoint != SequenceNumbers.UNASSIGNED_SEQ_NO) { - throw new IllegalArgumentException( - "Remote node version [" - + out.getTransportVersion() - + " incompatible with " - + "wait_for_checkpoints. All nodes must be version [" - + waitForCheckpointsVersion - + "] or greater." 
- ); - } + out.writeLong(waitForCheckpoint); + out.writeTimeValue(waitForCheckpointsTimeout); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { out.writeBoolean(forceSyntheticSource); } else { diff --git a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java index d2cd46bda315e..fd4bf691e80bf 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java +++ b/server/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.profile; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -75,11 +74,7 @@ public ProfileResult(StreamInput in) throws IOException { this.description = in.readString(); this.nodeTime = in.readLong(); breakdown = in.readMap(StreamInput::readLong); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_9_0)) { - debug = in.readMap(StreamInput::readGenericValue); - } else { - debug = Map.of(); - } + debug = in.readMap(StreamInput::readGenericValue); children = in.readCollectionAsList(ProfileResult::new); } @@ -89,9 +84,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(description); out.writeLong(nodeTime); // not Vlong because can be negative out.writeMap(breakdown, StreamOutput::writeLong); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_9_0)) { - out.writeMap(debug, StreamOutput::writeGenericValue); - } + out.writeMap(debug, StreamOutput::writeGenericValue); out.writeCollection(children); } diff --git a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java index d6975811091e3..1cc6810f8e575 100644 --- 
a/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java +++ b/server/src/main/java/org/elasticsearch/search/profile/SearchProfileResults.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -56,22 +55,12 @@ public SearchProfileResults(Map shardResults) } public SearchProfileResults(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - shardResults = in.readMap(SearchProfileShardResult::new); - } else { - // Before 8.0.0 we only send the query phase result - shardResults = in.readMap(i -> new SearchProfileShardResult(new SearchProfileQueryPhaseResult(i), null)); - } + shardResults = in.readMap(SearchProfileShardResult::new); } @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_16_0)) { - out.writeMap(shardResults, StreamOutput::writeWriteable); - } else { - // Before 8.0.0 we only send the query phase - out.writeMap(shardResults, (o, r) -> r.getQueryPhase().writeTo(o)); - } + out.writeMap(shardResults, StreamOutput::writeWriteable); } public Map getShardResults() { diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java index 3a1a9fcbb72a5..724f771e706d9 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.query; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import 
org.elasticsearch.action.search.SearchShardTask; @@ -51,11 +50,7 @@ public QuerySearchRequest(StreamInput in) throws IOException { contextId = new ShardSearchContextId(in); dfs = new AggregatedDfs(in); originalIndices = OriginalIndices.readOriginalIndices(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - this.shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); - } else { - this.shardSearchRequest = null; - } + this.shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); } @Override @@ -64,9 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { contextId.writeTo(out); dfs.writeTo(out); OriginalIndices.writeOriginalIndices(originalIndices, out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(shardSearchRequest); - } + out.writeOptionalWriteable(shardSearchRequest); } public ShardSearchContextId contextId() { diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 301d7fb219ca7..40d4e37045016 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -87,11 +87,7 @@ public QuerySearchResult(StreamInput in) throws IOException { */ public QuerySearchResult(StreamInput in, boolean delayedAggregations) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { - isNull = in.readBoolean(); - } else { - isNull = false; - } + isNull = in.readBoolean(); if (isNull == false) { ShardSearchContextId id = new ShardSearchContextId(in); readFromWithId(id, in, delayedAggregations); @@ -399,10 +395,8 @@ private void readFromWithId(ShardSearchContextId id, StreamInput in, boolean del hasProfileResults = profileShardResults != null; serviceTimeEWMA = in.readZLong(); nodeQueueSize = 
in.readInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); - setRescoreDocIds(new RescoreDocIds(in)); - } + setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); + setRescoreDocIds(new RescoreDocIds(in)); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { rankShardResult = in.readOptionalNamedWriteable(RankShardResult.class); } @@ -421,9 +415,7 @@ public void writeTo(StreamOutput out) throws IOException { if (aggregations != null && aggregations.isSerialized()) { throw new IllegalStateException("cannot send serialized version since it will leak"); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_7_0)) { - out.writeBoolean(isNull); - } + out.writeBoolean(isNull); if (isNull == false) { contextId.writeTo(out); writeToNoId(out); @@ -454,10 +446,8 @@ public void writeToNoId(StreamOutput out) throws IOException { out.writeOptionalWriteable(profileShardResults); out.writeZLong(serviceTimeEWMA); out.writeInt(nodeQueueSize); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_10_0)) { - out.writeOptionalWriteable(getShardSearchRequest()); - getRescoreDocIds().writeTo(out); - } + out.writeOptionalWriteable(getShardSearchRequest()); + getRescoreDocIds().writeTo(out); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeOptionalNamedWriteable(rankShardResult); } else if (rankShardResult != null) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java index dc24faad10857..ce063dc1ab243 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import 
org.apache.lucene.search.Query; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -90,25 +89,14 @@ public SliceBuilder(String field, int id, int max) { } public SliceBuilder(StreamInput in) throws IOException { - if (in.getTransportVersion().before(TransportVersions.V_7_15_0)) { - field = in.readString(); - } else { - field = in.readOptionalString(); - } - - this.id = in.readVInt(); - this.max = in.readVInt(); + field = in.readOptionalString(); + id = in.readVInt(); + max = in.readVInt(); } @Override public void writeTo(StreamOutput out) throws IOException { - // Before 7.15.0 we always defaulted to _id when the field wasn't provided - if (out.getTransportVersion().before(TransportVersions.V_7_15_0)) { - String sliceField = field != null ? field : IdFieldMapper.NAME; - out.writeString(sliceField); - } else { - out.writeOptionalString(field); - } + out.writeOptionalString(field); out.writeVInt(id); out.writeVInt(max); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index b8dc104c07316..b0a3a558e2956 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -145,12 +145,8 @@ public FieldSortBuilder(StreamInput in) throws IOException { sortMode = in.readOptionalWriteable(SortMode::readFromStream); unmappedType = in.readOptionalString(); nestedSort = in.readOptionalWriteable(NestedSortBuilder::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { - numericType = in.readOptionalString(); - } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - format = in.readOptionalString(); - } + numericType = in.readOptionalString(); + format = in.readOptionalString(); } 
@Override @@ -165,16 +161,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(sortMode); out.writeOptionalString(unmappedType); out.writeOptionalWriteable(nestedSort); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_2_0)) { - out.writeOptionalString(numericType); - } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_13_0)) { - out.writeOptionalString(format); - } else { - if (format != null) { - throw new IllegalArgumentException("Custom format for output of sort fields requires all nodes on 8.0 or later"); - } - } + out.writeOptionalString(numericType); + out.writeOptionalString(format); } /** Returns the document field this sort should be based on. */ diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortValue.java b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java index ab7dcd6615f79..815da73656917 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortValue.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortValue.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.sort; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -299,13 +298,6 @@ private BytesSortValue(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) { - throw new IllegalArgumentException( - "transport versions before [7110099] can't handle non-numeric sort values, attempted to send to [" - + out.getTransportVersion() - + "]" - ); - } out.writeBytesRef(key); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java index 7244e711544ac..719b0e3da58c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridTests.java @@ -8,23 +8,10 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoBoundingBoxTests; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; -import org.elasticsearch.test.TransportVersionUtils; - -import java.util.Collections; - -import static org.hamcrest.Matchers.equalTo; public class GeoHashGridTests extends BaseAggregationTestCase { @@ -49,29 +36,4 @@ protected GeoHashGridAggregationBuilder createTestAggregatorBuilder() { return factory; } - public void testSerializationPreBounds() throws Exception { - TransportVersion noBoundsSupportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersions.V_7_5_0 - ); - GeoHashGridAggregationBuilder builder = createTestAggregatorBuilder(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setTransportVersion(TransportVersions.V_7_6_0); - builder.writeTo(output); - try ( - StreamInput in = new NamedWriteableAwareStreamInput( - output.bytes().streamInput(), - new 
NamedWriteableRegistry(Collections.emptyList()) - ) - ) { - in.setTransportVersion(noBoundsSupportVersion); - GeoHashGridAggregationBuilder readBuilder = new GeoHashGridAggregationBuilder(in); - assertThat( - readBuilder.geoBoundingBox(), - equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN))) - ); - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java index 5e4e4870b1ef7..f19332f831c16 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoTileGridTests.java @@ -8,24 +8,11 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoBoundingBoxTests; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; -import org.elasticsearch.test.TransportVersionUtils; - -import java.util.Collections; - -import static org.hamcrest.Matchers.equalTo; public class GeoTileGridTests extends BaseAggregationTestCase { @@ -49,29 +36,4 @@ protected GeoTileGridAggregationBuilder createTestAggregatorBuilder() { return factory; } - 
public void testSerializationPreBounds() throws Exception { - TransportVersion noBoundsSupportVersion = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersions.V_7_5_0 - ); - GeoTileGridAggregationBuilder builder = createTestAggregatorBuilder(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setTransportVersion(TransportVersions.V_7_6_0); - builder.writeTo(output); - try ( - StreamInput in = new NamedWriteableAwareStreamInput( - output.bytes().streamInput(), - new NamedWriteableRegistry(Collections.emptyList()) - ) - ) { - in.setTransportVersion(noBoundsSupportVersion); - GeoTileGridAggregationBuilder readBuilder = new GeoTileGridAggregationBuilder(in); - assertThat( - readBuilder.geoBoundingBox(), - equalTo(new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN))) - ); - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index 4c4ac418c949b..7b9c79dfa6448 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; @@ -21,14 +20,11 @@ import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.test.TransportVersionUtils; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -39,7 +35,6 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; -import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public class InternalScriptedMetricTests extends InternalAggregationTestCase { @@ -274,36 +269,4 @@ protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) return new InternalScriptedMetric(name, aggregationsList, reduceScript, metadata); } - public void testOldSerialization() throws IOException { - // A single element list looks like a fully reduced agg - InternalScriptedMetric original = new InternalScriptedMetric("test", List.of("foo"), new Script("test"), null); - InternalScriptedMetric roundTripped = (InternalScriptedMetric) copyNamedWriteable( - original, - getNamedWriteableRegistry(), - InternalAggregation.class, - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_7_8_0) - ) - ); - assertThat(roundTripped, equalTo(original)); - - // A multi-element list looks like a non-reduced agg - InternalScriptedMetric unreduced = new InternalScriptedMetric("test", List.of("foo", "bar"), new Script("test"), null); - Exception e = expectThrows( - IllegalArgumentException.class, - () -> copyNamedWriteable( - unreduced, - getNamedWriteableRegistry(), - InternalAggregation.class, - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_7_8_0) - ) - ) - ); - assertThat(e.getMessage(), 
equalTo("scripted_metric doesn't support cross cluster search until 7.8.0")); - } } diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 7c8496d0f4b20..ee7cf7a60d639 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.builder; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.admin.cluster.stats.SearchUsageStats; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -52,7 +51,6 @@ import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.usage.SearchUsageHolder; import org.elasticsearch.usage.UsageService; import org.elasticsearch.xcontent.ToXContent; @@ -125,17 +123,6 @@ public void testSerialization() throws IOException { assertNotSame(copy, original); } - public void testSerializingWithRuntimeFieldsBeforeSupportedThrows() { - SearchSourceBuilder original = new SearchSourceBuilder().runtimeMappings(randomRuntimeMappings()); - TransportVersion v = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_7_11_0) - ); - Exception e = expectThrows(IllegalArgumentException.class, () -> copyBuilder(original, v)); - assertThat(e.getMessage(), equalTo("Versions before 7110099 don't support [runtime_mappings] and search was sent to [" + v + "]")); - } - public void testShallowCopy() { for (int i = 0; i < 10; i++) { SearchSourceBuilder original = createSearchSourceBuilder(); diff --git 
a/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java index 3539cfdc9709c..b447fb97b147d 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortValueTests.java @@ -10,8 +10,6 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.InetAddresses; @@ -19,7 +17,6 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.test.AbstractNamedWriteableTestCase; -import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -222,20 +219,6 @@ public void testBytes() { assertThat(SortValue.from(new BytesRef(r)), greaterThan(SortValue.from(new BytesRef(new byte[] {})))); } - public void testSerializeBytesToOldVersion() { - SortValue value = SortValue.from(new BytesRef("can't send me!")); - TransportVersion version = TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersions.V_7_10_1 - ); - Exception e = expectThrows(IllegalArgumentException.class, () -> copyInstance(value, version)); - assertThat( - e.getMessage(), - equalTo("transport versions before [7110099] can't handle non-numeric sort values, attempted to send to [" + version + "]") - ); - } - public String toXContent(SortValue sortValue, DocValueFormat format) { return Strings.toString(new ToXContentFragment() { @Override From 36869d417d54e8e19ccf5ec613468059e55cd958 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> 
Date: Thu, 30 Nov 2023 16:21:44 -0500 Subject: [PATCH 120/263] Adding changelog for PR #102093 --- docs/changelog/102093.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 docs/changelog/102093.yaml diff --git a/docs/changelog/102093.yaml b/docs/changelog/102093.yaml new file mode 100644 index 0000000000000..f6922c0d36be6 --- /dev/null +++ b/docs/changelog/102093.yaml @@ -0,0 +1,14 @@ +pr: 102093 +summary: Add byte quantization for float vectors in HNSW +area: Vector Search +type: feature +issues: [] +highlight: + title: Add new `int8_hnsw` index type for int8 quantization for HNSW + body: |- + This commit adds a new index type called `int8_hnsw`. This index will + automatically quantize float32 values into int8 byte values. While + this increases disk usage by 25%, it reduces memory required for + fast HNSW search by 75%. Dramatically reducing the resource overhead + required for dense vector search. + notable: true From 6787765c372ff0f5b8cdd39e4f11b5a30ea3ba9d Mon Sep 17 00:00:00 2001 From: Jon Date: Thu, 30 Nov 2023 15:47:33 -0600 Subject: [PATCH 121/263] Update IronBank BASE_IMAGE with ironbank prefix (#102720) This supports local testing. It should not be included in hardening_manifest.yml, which injects the scope at runtime.
--- distribution/docker/src/docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index b62fa983dd480..8fac93211d82b 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -21,7 +21,7 @@ <% if (docker_base == 'iron_bank') { %> ARG BASE_REGISTRY=registry1.dso.mil -ARG BASE_IMAGE=redhat/ubi/ubi9 +ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9 ARG BASE_TAG=9.2 <% } %> From df1b8f4c7147bdab52a92b2dc466ffc556a23a18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 1 Dec 2023 08:18:14 +0100 Subject: [PATCH 122/263] Java REST tests: more version -> feature update (#102761) --- .../test/rest/ESRestTestCase.java | 51 ++++++++++++++++++- .../test/rest/RestTestLegacyFeatures.java | 8 ++- ...MLModelDeploymentFullClusterRestartIT.java | 6 ++- .../upgrades/AbstractUpgradeTestCase.java | 16 ++++-- .../ApiKeyBackwardsCompatibilityIT.java | 43 ++++++++++------ .../upgrades/MLModelDeploymentsUpgradeIT.java | 11 +++- .../MlAssignmentPlannerUpgradeIT.java | 6 ++- .../SearchableSnapshotsRollingUpgradeIT.java | 40 +++++++-------- .../upgrades/TransformSurvivesUpgradeIT.java | 2 +- 9 files changed, 134 insertions(+), 49 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 2d0abaa5cf4ca..97f0b45fae462 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -21,6 +21,8 @@ import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; import org.elasticsearch.Build; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import 
org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -109,6 +111,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Predicate; +import java.util.function.Supplier; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -2107,7 +2110,53 @@ protected static IndexVersion minimumIndexVersion() throws IOException { return minVersion; } - private static Optional parseLegacyVersion(String version) { + /** + * Returns the minimum transport version among all nodes of the cluster + */ + protected static TransportVersion minimumTransportVersion() throws IOException { + Response response = client.performRequest(new Request("GET", "_nodes")); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map nodesAsMap = objectPath.evaluate("nodes"); + + TransportVersion minTransportVersion = null; + for (String id : nodesAsMap.keySet()) { + + var transportVersion = getTransportVersionWithFallback( + objectPath.evaluate("nodes." + id + ".version"), + objectPath.evaluate("nodes." 
+ id + ".transport_version"), + () -> TransportVersions.MINIMUM_COMPATIBLE + ); + if (minTransportVersion == null || minTransportVersion.after(transportVersion)) { + minTransportVersion = transportVersion; + } + } + + assertNotNull(minTransportVersion); + return minTransportVersion; + } + + protected static TransportVersion getTransportVersionWithFallback( + String versionField, + Object transportVersionField, + Supplier fallbackSupplier + ) { + if (transportVersionField instanceof Number transportVersionId) { + return TransportVersion.fromId(transportVersionId.intValue()); + } else if (transportVersionField instanceof String transportVersionString) { + return TransportVersion.fromString(transportVersionString); + } else { // no transport_version field + // The response might be from a node <8.8.0, but about a node >=8.8.0 + // In that case the transport_version field won't exist. Use version, but only for <8.8.0: after that versions diverge. + var version = parseLegacyVersion(versionField); + assert version.isPresent(); + if (version.get().before(Version.V_8_8_0)) { + return TransportVersion.fromId(version.get().id); + } + } + return fallbackSupplier.get(); + } + + protected static Optional parseLegacyVersion(String version) { var semanticVersionMatcher = SEMANTIC_VERSION_PATTERN.matcher(version); if (semanticVersionMatcher.matches()) { return Optional.of(Version.fromString(semanticVersionMatcher.group(1))); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index 60653d32e1e38..bd19757bac438 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -31,6 +31,10 @@ public class RestTestLegacyFeatures implements FeatureSpecification { "indices.delete_template_multiple_names_supported" ); + // QA - 
rolling upgrade tests + public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); + public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); + @Override public Map getHistoricalFeatures() { return Map.ofEntries( @@ -39,7 +43,9 @@ public Map getHistoricalFeatures() { entry(HIDDEN_INDICES_SUPPORTED, Version.V_7_7_0), entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), entry(DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED, Version.V_7_13_0), - entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0) + entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), + entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), + entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0) ); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index 2b3724eeae8d0..f67d1e4c37b28 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.junit.Before; @@ -90,7 +91,10 @@ protected Settings restClientSettings() { } public void testDeploymentSurvivesRestart() throws Exception { - assumeTrue("NLP model deployments added in 8.0", getOldClusterVersion().onOrAfter(Version.V_8_0_0)); + 
@UpdateForV9 // upgrade will always be from v8, condition can be removed + var originalClusterAtLeastV8 = getOldClusterVersion().onOrAfter(Version.V_8_0_0); + // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW + assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); String modelId = "trained-model-full-cluster-restart"; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index 865ba0c07cfeb..128fd8b47722f 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.test.SecuritySettingsSourceField; import org.junit.Before; @@ -31,13 +32,20 @@ public abstract class AbstractUpgradeTestCase extends ESRestTestCase { ); protected static final String UPGRADE_FROM_VERSION = System.getProperty("tests.upgrade_from_version"); - protected static final boolean SKIP_ML_TESTS = Booleans.parseBoolean(System.getProperty("tests.ml.skip", "false")); - // TODO: replace with feature testing - @Deprecated + protected static boolean isOriginalCluster(String clusterVersion) { + return UPGRADE_FROM_VERSION.equals(clusterVersion); + } + + @Deprecated(forRemoval = true) + @UpdateForV9 + // Tests should be reworked to rely on features from the current cluster (old, mixed or upgraded). 
+ // Version test against the original cluster will be removed protected static boolean isOriginalClusterVersionAtLeast(Version supportedVersion) { - return Version.fromString(UPGRADE_FROM_VERSION).onOrAfter(supportedVersion); + // Always assume non-semantic versions are OK: this method will be removed in V9, we are testing the pre-upgrade cluster version, + // and non-semantic versions are always V8+ + return parseLegacyVersion(UPGRADE_FROM_VERSION).map(x -> x.onOrAfter(supportedVersion)).orElse(true); } @Override diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 850a94f7133e9..1a37f31bffe79 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -10,7 +10,8 @@ import org.apache.http.HttpHost; import org.apache.http.client.methods.HttpGet; import org.elasticsearch.Build; -import org.elasticsearch.Version; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -19,6 +20,8 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; +import org.elasticsearch.transport.RemoteClusterPortSettings; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; @@ -46,15 +49,14 @@ public class ApiKeyBackwardsCompatibilityIT extends AbstractUpgradeTestCase { - public static final Version 
API_KEY_SUPPORT_REMOTE_INDICES_VERSION = Build.current().isSnapshot() ? Version.V_8_8_0 : Version.V_8_9_1; - private RestClient oldVersionClient = null; private RestClient newVersionClient = null; public void testCreatingAndUpdatingApiKeys() throws Exception { assumeTrue( - "The remote_indices for API Keys are not supported before version " + API_KEY_SUPPORT_REMOTE_INDICES_VERSION, - isOriginalClusterVersionAtLeast(API_KEY_SUPPORT_REMOTE_INDICES_VERSION) == false + "The remote_indices for API Keys are not supported before transport version " + + RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY, + minimumTransportVersion().before(RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) ); switch (CLUSTER_TYPE) { case OLD -> { @@ -182,8 +184,8 @@ private Tuple createOrGrantApiKey(RestClient client, String role "name": "%s", "role_descriptors": %s }""", name, roles); - // Grant API did not exist before 7.7.0 - final boolean grantApiKey = randomBoolean() && isOriginalClusterVersionAtLeast(Version.V_7_7_0); + + final boolean grantApiKey = randomBoolean(); if (grantApiKey) { createApiKeyRequest = new Request("POST", "/_security/api_key/grant"); createApiKeyRequest.setJsonEntity(org.elasticsearch.common.Strings.format(""" @@ -220,16 +222,16 @@ private void updateOrBulkUpdateApiKey(String id, String roles) throws IOExceptio private boolean isUpdateApiSupported(RestClient client) { return switch (CLUSTER_TYPE) { - case OLD -> isOriginalClusterVersionAtLeast(Version.V_8_4_0); // Update API was introduced in 8.4.0. - case MIXED -> isOriginalClusterVersionAtLeast(Version.V_8_4_0) || client == newVersionClient; + case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY); // Update API was introduced in 8.4.0. 
+ case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY) || client == newVersionClient; case UPGRADED -> true; }; } private boolean isBulkUpdateApiSupported(RestClient client) { return switch (CLUSTER_TYPE) { - case OLD -> isOriginalClusterVersionAtLeast(Version.V_8_5_0); // Bulk update API was introduced in 8.5.0. - case MIXED -> isOriginalClusterVersionAtLeast(Version.V_8_5_0) || client == newVersionClient; + case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY); // Bulk update API was introduced in 8.5.0. + case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY) || client == newVersionClient; case UPGRADED -> true; }; } @@ -304,10 +306,21 @@ private static String randomRoleDescriptors(boolean includeRemoteIndices) { } boolean nodeSupportApiKeyRemoteIndices(Map nodeDetails) { - // TODO[lor]: the method can be kept, but we need to replace version check with features checks - String versionString = (String) nodeDetails.get("version"); - Version version = Version.fromString(versionString.replace("-SNAPSHOT", "")); - return version.onOrAfter(API_KEY_SUPPORT_REMOTE_INDICES_VERSION); + String nodeVersionString = (String) nodeDetails.get("version"); + TransportVersion transportVersion = getTransportVersionWithFallback( + nodeVersionString, + nodeDetails.get("transport_version"), + () -> TransportVersions.ZERO + ); + + if (transportVersion.equals(TransportVersions.ZERO)) { + // In cases where we were not able to find a TransportVersion, a pre-8.8.0 node answered about a newer (upgraded) node. + // In that case, the node will be current (upgraded), and remote indices are supported for sure. 
+ var nodeIsCurrent = nodeVersionString.equals(Build.current().version()); + assertTrue(nodeIsCurrent); + return true; + } + return transportVersion.onOrAfter(RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY); } private void createClientsByVersion() throws IOException { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index e1845e901447e..d935672e0a243 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.UpdateForV9; import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; @@ -98,7 +99,10 @@ public void removeLogging() throws IOException { } public void testTrainedModelDeployment() throws Exception { - assumeTrue("NLP model deployments added in 8.0", isOriginalClusterVersionAtLeast(Version.V_8_0_0)); + @UpdateForV9 // upgrade will always be from v8, condition can be removed + var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0); + // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW + assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); final String modelId = "upgrade-deployment-test"; @@ -134,7 +138,10 @@ public void testTrainedModelDeployment() throws Exception { } public void testTrainedModelDeploymentStopOnMixedCluster() throws Exception { - assumeTrue("NLP model deployments added in 8.0", isOriginalClusterVersionAtLeast(Version.V_8_0_0)); + @UpdateForV9 // upgrade will always be 
from v8, condition can be removed + var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0); + // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW + assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); final String modelId = "upgrade-deployment-test-stop-mixed-cluster"; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index f1a72663aaf82..657a51dfe1b95 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -67,7 +68,10 @@ public class MlAssignmentPlannerUpgradeIT extends AbstractUpgradeTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101926") public void testMlAssignmentPlannerUpgrade() throws Exception { - assumeTrue("NLP model deployments added in 8.0", isOriginalClusterVersionAtLeast(Version.V_8_0_0)); + @UpdateForV9 // upgrade will always be from v8, condition can be removed + var originalClusterAtLeastV8 = isOriginalClusterVersionAtLeast(Version.V_8_0_0); + // These tests assume the original cluster is v8 - testing for features on the _current_ cluster will break for NEW + assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); assumeFalse("This test deploys multiple models which cannot be accommodated on a single processor", IS_SINGLE_PROCESSOR_TEST); 
logger.info("Starting testMlAssignmentPlannerUpgrade, model size {}", RAW_MODEL_SIZE); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java index 0f25592493a1c..0c9827f649170 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java @@ -11,7 +11,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -160,13 +159,13 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs final var newVersionNodes = nodesIdsAndVersions.entrySet() .stream() - .filter(node -> UPGRADE_FROM_VERSION.equals(node.getValue()) == false) + .filter(node -> isOriginalCluster(node.getValue()) == false) .map(Map.Entry::getKey) .collect(Collectors.toSet()); final var originalVersionNodes = nodesIdsAndVersions.entrySet() .stream() - .filter(node -> UPGRADE_FROM_VERSION.equals(node.getValue())) + .filter(node -> isOriginalCluster(node.getValue())) .map(Map.Entry::getKey) .collect(Collectors.toSet()); @@ -288,27 +287,22 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs assertHitCount(index, equalTo(numberOfDocs * 2L)); deleteIndex(index); - if (isOriginalClusterVersionAtLeast(Version.V_7_13_0)) { - final Request request = new Request( - "GET", - "/.snapshot-blob-cache/_settings/index.routing.allocation.include._tier_preference" - ); - request.setOptions( - expectWarnings( - "this request accesses system indices: [.snapshot-blob-cache], but in 
a future major " - + "version, direct access to system indices will be prevented by default" - ) - ); - request.addParameter("flat_settings", "true"); + final Request request = new Request("GET", "/.snapshot-blob-cache/_settings/index.routing.allocation.include._tier_preference"); + request.setOptions( + expectWarnings( + "this request accesses system indices: [.snapshot-blob-cache], but in a future major " + + "version, direct access to system indices will be prevented by default" + ) + ); + request.addParameter("flat_settings", "true"); - final Map snapshotBlobCacheSettings = entityAsMap(adminClient().performRequest(request)); - assertThat(snapshotBlobCacheSettings, notNullValue()); - final String tierPreference = (String) extractValue( - ".snapshot-blob-cache.settings.index.routing.allocation.include._tier_preference", - snapshotBlobCacheSettings - ); - assertThat(tierPreference, equalTo("data_content,data_hot")); - } + final Map snapshotBlobCacheSettings = entityAsMap(adminClient().performRequest(request)); + assertThat(snapshotBlobCacheSettings, notNullValue()); + final String tierPreference = (String) extractValue( + ".snapshot-blob-cache.settings.index.routing.allocation.include._tier_preference", + snapshotBlobCacheSettings + ); + assertThat(tierPreference, equalTo("data_content,data_hot")); } else if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) { for (String snapshot : snapshots) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java index c24665d812db6..78ee66fa4d327 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java @@ -235,7 +235,7 @@ private void verifyContinuousTransformHandlesData(long expectedLastCheckpoint) t private void 
verifyUpgradeFailsIfMixedCluster() { // upgrade tests by design are also executed with the same version, this check must be skipped in this case, see gh#39102. - if (UPGRADE_FROM_VERSION.equals(Build.current().version())) { + if (isOriginalCluster(Build.current().version())) { return; } final Request upgradeTransformRequest = new Request("POST", getTransformEndpoint() + "_upgrade"); From 93a4f04cfb1a5033b057248ce6cd07c84550b7dc Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 1 Dec 2023 09:41:31 +0100 Subject: [PATCH 123/263] Disable concurrency for sampler and diversified sampler (#102832) Sampler and Diversified sampler aggs are subject to precision errors when executed in parallel across slices. The problematic bits are the shard_size and max_docs_per_value. These are harder to work around compared to e.g. terms aggs where we can simply look at the cardinality of the field, and perhaps less important. For now, we just disable concurrency to stay on the safe side. --- docs/changelog/102832.yaml | 5 +++++ .../bucket/DiversifiedSamplerIT.java | 5 +++-- .../search/aggregations/bucket/SamplerIT.java | 3 ++- .../sampler/DiversifiedAggregationBuilder.java | 6 ++++++ .../sampler/SamplerAggregationBuilder.java | 6 ++++++ .../bucket/sampler/DiversifiedSamplerTests.java | 17 +++++++++++++++++ .../bucket/sampler/SamplerAggregatorTests.java | 10 ++++++++++ 7 files changed, 49 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/102832.yaml diff --git a/docs/changelog/102832.yaml b/docs/changelog/102832.yaml new file mode 100644 index 0000000000000..7daf22263b2e9 --- /dev/null +++ b/docs/changelog/102832.yaml @@ -0,0 +1,5 @@ +pr: 102832 +summary: Disable concurrency for sampler and diversified sampler +area: Aggregations +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 3a313cec29402..5a58780a24817 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -40,7 +40,7 @@ @ESIntegTestCase.SuiteScopeTestCase public class DiversifiedSamplerIT extends ESIntegTestCase { - public static final int NUM_SHARDS = 2; + private static final int NUM_SHARDS = 1; public String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); @@ -83,8 +83,9 @@ public void setupSuiteScopeCluster() throws Exception { prepareIndex("idx_unmapped_author").setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); + // frequent refresh makes it more likely that more segments are created, hence we may parallelize the search across slices + indicesAdmin().refresh(new RefreshRequest()).get(); } - indicesAdmin().refresh(new RefreshRequest("test")).get(); } public void testIssue10719() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index 7f46856cdd594..00779ba9b256e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -81,8 +81,9 @@ public void setupSuiteScopeCluster() throws Exception { prepareIndex("idx_unmapped_author").setId("" + i) .setSource("name", parts[2], "genre", parts[8], "price", Float.parseFloat(parts[3])) .get(); + // frequent refresh makes it more likely that more segments are created, hence we may parallelize the search 
across slices + indicesAdmin().refresh(new RefreshRequest()).get(); } - indicesAdmin().refresh(new RefreshRequest("test")).get(); } public void testIssue10719() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java index e77b15e1ed1d4..0eecdc9e2a6e5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedAggregationBuilder.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; +import java.util.function.ToLongFunction; public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder { public static final String NAME = "diversified_sampler"; @@ -189,4 +190,9 @@ public String getType() { public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ZERO; } + + @Override + public boolean supportsParallelCollection(ToLongFunction fieldCardinalityResolver) { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java index 5c3208418df08..0f85e5e11064c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.util.Map; import java.util.Objects; +import java.util.function.ToLongFunction; public class SamplerAggregationBuilder extends AbstractAggregationBuilder { public static final String NAME = "sampler"; @@ -141,4 +142,9 @@ public 
String getType() { public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ZERO; } + + @Override + public boolean supportsParallelCollection(ToLongFunction fieldCardinalityResolver) { + return false; + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index 797ace3f2b37c..6ac538f6c7ce9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -199,4 +199,21 @@ public void testDiversifiedSampler_noDocs() throws Exception { indexReader.close(); directory.close(); } + + public void testSupportsParallelCollection() { + DiversifiedAggregationBuilder sampler = new DiversifiedAggregationBuilder("name"); + if (randomBoolean()) { + sampler.field("field"); + } + if (randomBoolean()) { + sampler.maxDocsPerValue(randomIntBetween(1, 1000)); + } + if (randomBoolean()) { + sampler.subAggregation(new TermsAggregationBuilder("name").field("field")); + } + if (randomBoolean()) { + sampler.shardSize(randomIntBetween(1, 1000)); + } + assertFalse(sampler.supportsParallelCollection(null)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index 220c863def228..722a510ce381e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -132,4 +132,14 @@ public void testEmptyParentBucket() throws Exception { } } + public void testSupportsParallelCollection() { + SamplerAggregationBuilder sampler 
= new SamplerAggregationBuilder("name"); + if (randomBoolean()) { + sampler.subAggregation(new TermsAggregationBuilder("name").field("field")); + } + if (randomBoolean()) { + sampler.shardSize(randomIntBetween(1, 1000)); + } + assertFalse(sampler.supportsParallelCollection(null)); + } } From 1b6c85326fa84a8319c5bb7d1b1e468a3eb24ba8 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 1 Dec 2023 08:57:39 +0000 Subject: [PATCH 124/263] [ML] Add internal inference action for ml models an services (#102731) Add a new internal CoordinatedInferenceAction that routes model inference request to either the _inference API or the ml _infer APIs. This action is used by the Inference ingest processor, the text expansion query (ELSER) and the knn vector builder (for text embedding models). --- docs/changelog/102731.yaml | 5 + .../inference/InferenceService.java | 3 + .../inference/InferenceServiceExtension.java | 30 ++ .../inference/InferenceServiceRegistry.java | 12 +- .../org/elasticsearch/inference/TaskType.java | 18 +- .../elasticsearch/node/NodeConstruction.java | 12 +- .../plugins/InferenceServicePlugin.java | 43 -- .../core/src/main/java/module-info.java | 2 + .../action/DeleteInferenceModelAction.java | 2 +- .../action/GetInferenceModelAction.java | 7 +- .../inference/action/InferenceAction.java | 8 +- .../action/PutInferenceModelAction.java | 2 +- .../results/LegacyTextEmbeddingResults.java | 5 +- .../results/SparseEmbeddingResults.java | 2 +- .../results/TextEmbeddingResults.java | 2 +- .../ml/action/CoordinatedInferenceAction.java | 242 +++++++++++ .../trainedmodel/EmptyConfigUpdate.java | 2 + ...oordinatedInferenceActionRequestTests.java | 88 ++++ .../action/InferModelActionRequestTests.java | 2 +- .../inference/MockInferenceServiceIT.java | 26 +- .../qa/test-service-plugin/build.gradle | 1 + .../mock/TestInferenceServiceExtension.java | 311 ++++++++++++++ .../mock/TestInferenceServicePlugin.java | 395 +----------------- ...search.inference.InferenceServiceExtension 
| 1 + .../integration/ModelRegistryIT.java | 4 +- .../inference/src/main/java/module-info.java | 4 +- .../InferenceNamedWriteablesProvider.java | 6 +- .../xpack/inference/InferencePlugin.java | 66 ++- .../TransportDeleteInferenceModelAction.java | 1 + .../TransportGetInferenceModelAction.java | 1 + .../action/TransportInferenceAction.java | 4 +- .../TransportPutInferenceModelAction.java | 1 + .../HuggingFaceElserResponseEntity.java | 2 +- .../OpenAiEmbeddingsResponseEntity.java | 2 +- .../rest/RestDeleteInferenceModelAction.java | 2 +- .../rest/RestGetInferenceModelAction.java | 2 +- .../inference/rest/RestInferenceAction.java | 2 +- .../rest/RestPutInferenceModelAction.java | 2 +- .../services/elser/ElserMlNodeService.java | 8 +- .../action/GetInferenceModelRequestTests.java | 1 + .../GetInferenceModelResponseTests.java | 1 + .../action/InferenceActionRequestTests.java | 1 + .../action/InferenceActionResponseTests.java | 3 +- .../action/PutInferenceModelRequestTests.java | 1 + .../PutInferenceModelResponseTests.java | 1 + .../HuggingFaceElserResponseEntityTests.java | 2 +- .../OpenAiEmbeddingsResponseEntityTests.java | 2 +- .../LegacyTextEmbeddingResultsTests.java | 1 + .../results/SparseEmbeddingResultsTests.java | 1 + .../results/TextEmbeddingResultsTests.java | 1 + .../elser/ElserMlNodeServiceTests.java | 4 +- .../ml-inference-service-tests/build.gradle | 12 + .../CoordinatedInferenceIngestIT.java | 309 ++++++++++++++ .../xpack/ml/integration/ExampleModels.java | 305 ++++++++++++++ .../xpack/ml/integration/InferenceIT.java | 3 +- .../ml/integration/TextEmbeddingQueryIT.java | 2 +- .../ml/integration/TextExpansionQueryIT.java | 2 +- .../xpack/ml/MachineLearning.java | 3 + .../TransportCoordinatedInferenceAction.java | 188 +++++++++ .../TrainedModelAssignmentUtils.java | 23 + .../inference/ingest/InferenceProcessor.java | 31 +- .../ml/queries/TextExpansionQueryBuilder.java | 59 +-- .../TextEmbeddingQueryVectorBuilder.java | 7 +- 
.../InferenceProcessorFactoryTests.java | 31 -- .../ingest/InferenceProcessorTests.java | 12 +- .../TextExpansionQueryBuilderTests.java | 8 +- .../TextEmbeddingQueryVectorBuilderTests.java | 12 +- .../xpack/security/operator/Constants.java | 1 + 68 files changed, 1746 insertions(+), 609 deletions(-) create mode 100644 docs/changelog/102731.yaml create mode 100644 server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java delete mode 100644 server/src/main/java/org/elasticsearch/plugins/InferenceServicePlugin.java rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/action/DeleteInferenceModelAction.java (97%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/action/GetInferenceModelAction.java (95%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/action/InferenceAction.java (97%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/action/PutInferenceModelAction.java (98%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/results/LegacyTextEmbeddingResults.java (94%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/results/SparseEmbeddingResults.java (99%) rename x-pack/plugin/{inference/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/inference/results/TextEmbeddingResults.java (98%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java create mode 100644 x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java create mode 100644 x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension create mode 100644 x-pack/plugin/ml/qa/ml-inference-service-tests/build.gradle create mode 100644 x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java create mode 100644 x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExampleModels.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java diff --git a/docs/changelog/102731.yaml b/docs/changelog/102731.yaml new file mode 100644 index 0000000000000..a12e04bfab078 --- /dev/null +++ b/docs/changelog/102731.yaml @@ -0,0 +1,5 @@ +pr: 102731 +summary: Add internal inference action for ml models an services +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 499cf5d5ca64f..80feebd435cb1 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; import java.io.Closeable; import java.util.List; @@ -18,6 +19,8 @@ public interface InferenceService extends Closeable { + default void init(Client client) {} + String name(); /** diff --git 
a/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java new file mode 100644 index 0000000000000..4b42e8ca53854 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceExtension.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.inference; + +import org.elasticsearch.client.internal.Client; + +import java.util.List; + +/** + * SPI extension that define inference services + */ +public interface InferenceServiceExtension { + + List getInferenceServiceFactories(); + + record InferenceServiceFactoryContext(Client client) {} + + interface Factory { + /** + * InferenceServices are created from the factory context + */ + InferenceService create(InferenceServiceFactoryContext context); + } +} diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java index ac1439150f8ec..a0ed7bbd82b24 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceRegistry.java @@ -8,9 +8,9 @@ package org.elasticsearch.inference; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.plugins.InferenceServicePlugin; import java.io.IOException; import java.util.ArrayList; @@ -26,18 +26,18 @@ public class InferenceServiceRegistry extends 
AbstractLifecycleComponent { private final List namedWriteables = new ArrayList<>(); public InferenceServiceRegistry( - List inferenceServicePlugins, - InferenceServicePlugin.InferenceServiceFactoryContext factoryContext + List inferenceServicePlugins, + InferenceServiceExtension.InferenceServiceFactoryContext factoryContext ) { // TODO check names are unique services = inferenceServicePlugins.stream() .flatMap(r -> r.getInferenceServiceFactories().stream()) .map(factory -> factory.create(factoryContext)) .collect(Collectors.toMap(InferenceService::name, Function.identity())); + } - for (var plugin : inferenceServicePlugins) { - namedWriteables.addAll(plugin.getInferenceServiceNamedWriteables()); - } + public void init(Client client) { + services.values().forEach(s -> s.init(client)); } public Map getServices() { diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 9e96a7c4c52d0..5afedee873145 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -20,7 +20,13 @@ public enum TaskType implements Writeable { TEXT_EMBEDDING, - SPARSE_EMBEDDING; + SPARSE_EMBEDDING, + ANY { + @Override + public boolean isAnyOrSame(TaskType other) { + return true; + } + }; public static String NAME = "task_type"; @@ -37,6 +43,16 @@ public static TaskType fromStringOrStatusException(String name) { } } + /** + * Return true if the {@code other} is the {@link #ANY} type + * or the same as this. + * @param other The other + * @return True if same or any. 
+ */ + public boolean isAnyOrSame(TaskType other) { + return other == TaskType.ANY || other == this; + } + @Override public String toString() { return name().toLowerCase(Locale.ROOT); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 0623c3b196e45..7a0fa7a821732 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -121,7 +121,6 @@ import org.elasticsearch.indices.recovery.plan.PeerOnlyRecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; -import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; @@ -139,7 +138,6 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.HealthPlugin; -import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.MetadataUpgrader; @@ -516,13 +514,6 @@ private void createClientAndRegistries(Settings settings, ThreadPool threadPool, localNodeFactory = new Node.LocalNodeFactory(settings, nodeEnvironment.nodeId()); - InferenceServiceRegistry inferenceServiceRegistry = new InferenceServiceRegistry( - pluginsService.filterPlugins(InferenceServicePlugin.class).toList(), - new InferenceServicePlugin.InferenceServiceFactoryContext(client) - ); - resourcesToClose.add(inferenceServiceRegistry); - modules.bindToInstance(InferenceServiceRegistry.class, inferenceServiceRegistry); - namedWriteableRegistry = new NamedWriteableRegistry( Stream.of( NetworkModule.getNamedWriteables().stream(), @@ 
-530,8 +521,7 @@ private void createClientAndRegistries(Settings settings, ThreadPool threadPool, searchModule.getNamedWriteables().stream(), pluginsService.flatMap(Plugin::getNamedWriteables), ClusterModule.getNamedWriteables().stream(), - SystemIndexMigrationExecutor.getNamedWriteables().stream(), - inferenceServiceRegistry.getNamedWriteables().stream() + SystemIndexMigrationExecutor.getNamedWriteables().stream() ).flatMap(Function.identity()).toList() ); xContentRegistry = new NamedXContentRegistry( diff --git a/server/src/main/java/org/elasticsearch/plugins/InferenceServicePlugin.java b/server/src/main/java/org/elasticsearch/plugins/InferenceServicePlugin.java deleted file mode 100644 index 2672a4b8fcbcf..0000000000000 --- a/server/src/main/java/org/elasticsearch/plugins/InferenceServicePlugin.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.plugins; - -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.inference.InferenceService; - -import java.util.List; - -/** - * InferenceServicePlugins implement an inference service - */ -public interface InferenceServicePlugin { - - List getInferenceServiceFactories(); - - record InferenceServiceFactoryContext(Client client) {} - - interface Factory { - /** - * InferenceServices are created from the factory context - */ - InferenceService create(InferenceServiceFactoryContext context); - } - - /** - * The named writables defined and used by each of the implemented - * InferenceServices. 
Each service should define named writables for - * - {@link org.elasticsearch.inference.TaskSettings} - * - {@link org.elasticsearch.inference.ServiceSettings} - * And optionally for {@link org.elasticsearch.inference.InferenceResults} - * if the service uses a new type of result. - * @return All named writables defined by the services - */ - List getInferenceServiceNamedWriteables(); -} diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index eb1271edd3b06..4aa2e145228b8 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -73,6 +73,8 @@ exports org.elasticsearch.xpack.core.ilm.step.info; exports org.elasticsearch.xpack.core.ilm; exports org.elasticsearch.xpack.core.indexing; + exports org.elasticsearch.xpack.core.inference.action; + exports org.elasticsearch.xpack.core.inference.results; exports org.elasticsearch.xpack.core.logstash; exports org.elasticsearch.xpack.core.ml.action; exports org.elasticsearch.xpack.core.ml.annotations; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java similarity index 97% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java index 4062946935b2e..1324471f7c0ab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/DeleteInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.action; +package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java similarity index 95% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index a9b1fb32a7471..6e2b3a7a89e32 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.action; +package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; @@ -43,6 +43,11 @@ public Request(String modelId, String taskType) { this.taskType = TaskType.fromStringOrStatusException(taskType); } + public Request(String modelId, TaskType taskType) { + this.modelId = modelId; + this.taskType = taskType; + } + public Request(StreamInput in) throws IOException { super(in); this.modelId = in.readString(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java similarity index 97% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index a6d0818cf1ca4..53391aca84622 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.action; +package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersions; @@ -24,9 +24,9 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; -import org.elasticsearch.xpack.inference.results.LegacyTextEmbeddingResults; -import org.elasticsearch.xpack.inference.results.SparseEmbeddingResults; import java.io.IOException; import java.util.ArrayList; @@ -210,7 +210,7 @@ public Response(StreamInput in) throws IOException { } @SuppressWarnings("deprecation") - static InferenceServiceResults transformToServiceResults(List parsedResults) { + public static InferenceServiceResults transformToServiceResults(List parsedResults) { if (parsedResults.isEmpty()) { throw new ElasticsearchStatusException( "Failed to transform results to response format, expected a non-empty list, please remove and re-add the service", diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java similarity index 98% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index 45b9474cebcdc..e6e4ea1001f68 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.action; +package org.elasticsearch.xpack.core.inference.action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java similarity index 94% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java index b5d6b8483138a..8f03a75c61c11 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/LegacyTextEmbeddingResults.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.results; +package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -39,8 +39,7 @@ * ] * } * - * This class represents the way that the {@link org.elasticsearch.xpack.inference.services.openai.OpenAiService} - * formatted the response for the embeddings type. This represents what was returned prior to the + * Legacy text embedding results represents what was returned prior to the * {@link org.elasticsearch.TransportVersions#INFERENCE_SERVICE_RESULTS_ADDED} version. 
* @deprecated use {@link TextEmbeddingResults} instead */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java similarity index 99% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index 0e0299a5e12fd..20279e82d6c09 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.inference.results; +package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java similarity index 98% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResults.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java index 74f94e1aea17d..7a7ccab2b4daa 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.results; +package org.elasticsearch.xpack.core.inference.results; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java new file mode 100644 index 0000000000000..8ff0c1179ea61 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class CoordinatedInferenceAction extends ActionType { + + public static final CoordinatedInferenceAction INSTANCE = new CoordinatedInferenceAction(); + public static final String NAME = "cluster:internal/xpack/ml/coordinatedinference"; + + public CoordinatedInferenceAction() { + super(NAME, InferModelAction.Response::new); + } + + public static class Request 
extends ActionRequest { + + public enum RequestModelType { + INFERENCE_SERVICE_MODEL, + ML_NODE_PYTORCH_MODEL, + BOOSTED_TREE_MODEL, + NLP_MODEL, // Either an inference service model or ml pytorch model but not a boosted tree model + UNKNOWN + }; + + public static Request forTextInput( + String modelId, + List inputs, + @Nullable InferenceConfigUpdate inferenceConfigUpdate, + @Nullable Boolean previouslyLicensed, + @Nullable TimeValue inferenceTimeout + ) { + return new Request( + modelId, + inputs, + null, + null, + inferenceConfigUpdate, + previouslyLicensed, + inferenceTimeout, + false, // not high priority + RequestModelType.NLP_MODEL + ); + } + + public static Request forMapInput( + String modelId, + List> objectsToInfer, + @Nullable InferenceConfigUpdate inferenceConfigUpdate, + @Nullable Boolean previouslyLicensed, + @Nullable TimeValue inferenceTimeout, + RequestModelType modelType + ) { + return new Request( + modelId, + null, + null, + objectsToInfer, + inferenceConfigUpdate, + previouslyLicensed, + inferenceTimeout, + false, // not high priority, + modelType + ); + } + + private final String modelId; + private final RequestModelType requestModelType; + // For inference services or cluster hosted NLP models + private final List inputs; + // _inference settings + private final Map taskSettings; + // In cluster model options + private final TimeValue inferenceTimeout; + private final Boolean previouslyLicensed; + private final InferenceConfigUpdate inferenceConfigUpdate; + private boolean highPriority; + private TrainedModelPrefixStrings.PrefixType prefixType = TrainedModelPrefixStrings.PrefixType.NONE; + // DFA models only + private final List> objectsToInfer; + + private Request( + String modelId, + @Nullable List inputs, + @Nullable Map taskSettings, + @Nullable List> objectsToInfer, + @Nullable InferenceConfigUpdate inferenceConfigUpdate, + @Nullable Boolean previouslyLicensed, + @Nullable TimeValue inferenceTimeout, + boolean highPriority, + 
RequestModelType requestModelType + ) { + this.modelId = ExceptionsHelper.requireNonNull(modelId, "model_id"); + this.inputs = inputs; + this.taskSettings = taskSettings; + this.objectsToInfer = objectsToInfer; + this.inferenceConfigUpdate = inferenceConfigUpdate; + this.previouslyLicensed = previouslyLicensed; + this.inferenceTimeout = inferenceTimeout; + this.highPriority = highPriority; + this.requestModelType = requestModelType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.modelId = in.readString(); + this.requestModelType = in.readEnum(RequestModelType.class); + this.inputs = in.readOptionalStringCollectionAsList(); + this.taskSettings = in.readMap(); + this.objectsToInfer = in.readOptionalCollectionAsList(StreamInput::readMap); + this.inferenceConfigUpdate = in.readOptionalNamedWriteable(InferenceConfigUpdate.class); + this.previouslyLicensed = in.readOptionalBoolean(); + this.inferenceTimeout = in.readOptionalTimeValue(); + this.highPriority = in.readBoolean(); + } + + public String getModelId() { + return modelId; + } + + public List getInputs() { + return inputs; + } + + public Map getTaskSettings() { + return taskSettings; + } + + public List> getObjectsToInfer() { + return objectsToInfer; + } + + public InferenceConfigUpdate getInferenceConfigUpdate() { + return inferenceConfigUpdate; + } + + public Boolean getPreviouslyLicensed() { + return previouslyLicensed; + } + + public TimeValue getInferenceTimeout() { + return inferenceTimeout; + } + + public boolean getHighPriority() { + return highPriority; + } + + public void setHighPriority(boolean highPriority) { + this.highPriority = highPriority; + } + + public boolean hasInferenceConfig() { + return inferenceConfigUpdate != null; + } + + public boolean hasObjects() { + return objectsToInfer != null; + } + + public void setPrefixType(TrainedModelPrefixStrings.PrefixType prefixType) { + this.prefixType = prefixType; + } + + public TrainedModelPrefixStrings.PrefixType 
getPrefixType() { + return prefixType; + } + + public RequestModelType getRequestModelType() { + return requestModelType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(modelId); + out.writeEnum(requestModelType); + out.writeOptionalStringCollection(inputs); + out.writeGenericMap(taskSettings); + out.writeOptionalCollection(objectsToInfer, StreamOutput::writeGenericMap); + out.writeOptionalNamedWriteable(inferenceConfigUpdate); + out.writeOptionalBoolean(previouslyLicensed); + out.writeOptionalTimeValue(inferenceTimeout); + out.writeBoolean(highPriority); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(modelId, request.modelId) + && Objects.equals(requestModelType, request.requestModelType) + && Objects.equals(inputs, request.inputs) + && Objects.equals(taskSettings, request.taskSettings) + && Objects.equals(objectsToInfer, request.objectsToInfer) + && Objects.equals(inferenceConfigUpdate, request.inferenceConfigUpdate) + && Objects.equals(previouslyLicensed, request.previouslyLicensed) + && Objects.equals(inferenceTimeout, request.inferenceTimeout) + && Objects.equals(highPriority, request.highPriority); + } + + @Override + public int hashCode() { + return Objects.hash( + modelId, + requestModelType, + inputs, + taskSettings, + objectsToInfer, + inferenceConfigUpdate, + previouslyLicensed, + inferenceTimeout, + highPriority + ); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java index 0ba74df1f8d54..c098b13fd1deb 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java @@ -19,6 +19,8 @@ public class EmptyConfigUpdate implements InferenceConfigUpdate { public static final String NAME = "empty"; + public static final EmptyConfigUpdate INSTANCE = new EmptyConfigUpdate(); + public static MlConfigVersion minimumSupportedVersion() { return MlConfigVersion.V_7_9_0; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java new file mode 100644 index 0000000000000..bd8e0ad96f21a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class CoordinatedInferenceActionRequestTests extends AbstractWireSerializingTestCase { + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + List entries = new ArrayList<>(); + entries.addAll(new MlInferenceNamedXContentProvider().getNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + @Override + protected Writeable.Reader instanceReader() { + return CoordinatedInferenceAction.Request::new; + } + + @Override + protected CoordinatedInferenceAction.Request createTestInstance() { + return switch (randomIntBetween(0, 1)) { + case 0 -> { + var inferenceConfig = randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(); + var previouslyLicensed = randomBoolean() ? null : randomBoolean(); + var inferenceTimeout = randomBoolean() ? null : TimeValue.parseTimeValue(randomTimeValue(), null, "timeout"); + var highPriority = randomBoolean(); + + var request = CoordinatedInferenceAction.Request.forTextInput( + randomAlphaOfLength(6), + List.of(randomAlphaOfLength(6)), + inferenceConfig, + previouslyLicensed, + inferenceTimeout + ); + request.setHighPriority(highPriority); + yield request; + } + case 1 -> { + var inferenceConfig = randomBoolean() ? null : InferModelActionRequestTests.randomInferenceConfigUpdate(); + var previouslyLicensed = randomBoolean() ? null : randomBoolean(); + var inferenceTimeout = randomBoolean() ? 
null : TimeValue.parseTimeValue(randomTimeValue(), null, "timeout"); + var highPriority = randomBoolean(); + var modelType = randomFrom(CoordinatedInferenceAction.Request.RequestModelType.values()); + + var request = CoordinatedInferenceAction.Request.forMapInput( + randomAlphaOfLength(6), + Stream.generate(CoordinatedInferenceActionRequestTests::randomMap).limit(randomInt(5)).collect(Collectors.toList()), + inferenceConfig, + previouslyLicensed, + inferenceTimeout, + modelType + ); + request.setHighPriority(highPriority); + yield request; + } + default -> throw new UnsupportedOperationException(); + }; + } + + private static Map randomMap() { + return Stream.generate(() -> randomAlphaOfLength(10)) + .limit(randomInt(10)) + .collect(Collectors.toMap(Function.identity(), (v) -> randomAlphaOfLength(10))); + } + + @Override + protected CoordinatedInferenceAction.Request mutateInstance(CoordinatedInferenceAction.Request instance) throws IOException { + return null; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java index fcfc396313016..b33b64ccf69d7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/InferModelActionRequestTests.java @@ -130,7 +130,7 @@ protected Request mutateInstance(Request instance) { return r; } - private static InferenceConfigUpdate randomInferenceConfigUpdate() { + public static InferenceConfigUpdate randomInferenceConfigUpdate() { return randomFrom( RegressionConfigUpdateTests.randomRegressionConfigUpdate(), ClassificationConfigUpdateTests.randomClassificationConfigUpdate(), diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java index 5ed11958fc64e..058c921152afc 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java @@ -25,12 +25,8 @@ import java.util.Map; import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; public class MockInferenceServiceIT extends ESRestTestCase { @@ -84,7 +80,7 @@ public void testMockService() throws IOException { // The response is randomly generated, the input can be anything var inference = inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10))); - assertNonEmptyInferenceResults(inference, TaskType.SPARSE_EMBEDDING); + assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); } @SuppressWarnings("unchecked") @@ -99,9 +95,7 @@ public void testMockServiceWithMultipleInputs() throws IOException { List.of(randomAlphaOfLength(5), randomAlphaOfLength(10), randomAlphaOfLength(15)) ); - var results = (List>) inference.get("result"); - assertThat(results, hasSize(3)); - assertNonEmptyInferenceResults(inference, TaskType.SPARSE_EMBEDDING); + assertNonEmptyInferenceResults(inference, 3, TaskType.SPARSE_EMBEDDING); } @SuppressWarnings("unchecked") @@ -149,21 +143,17 @@ private Map inferOnMockService(String modelId, TaskType taskType bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); bodyBuilder.append("]}"); - System.out.println("body_request:" + bodyBuilder); request.setJsonEntity(bodyBuilder.toString()); - var 
reponse = client().performRequest(request); - assertOkWithErrorMessage(reponse); - return entityAsMap(reponse); + var response = client().performRequest(request); + assertOkWithErrorMessage(response); + return entityAsMap(response); } @SuppressWarnings("unchecked") - protected void assertNonEmptyInferenceResults(Map resultMap, TaskType taskType) { + protected void assertNonEmptyInferenceResults(Map resultMap, int expectedNumberOfResults, TaskType taskType) { if (taskType == TaskType.SPARSE_EMBEDDING) { - var results = (List) resultMap.get("result"); - assertThat(results, not(empty())); - for (String result : results) { - assertThat(result, is(not(emptyString()))); - } + var results = (List>) resultMap.get(TaskType.SPARSE_EMBEDDING.toString()); + assertThat(results, hasSize(expectedNumberOfResults)); } else { fail("test with task type [" + taskType + "] are not supported yet"); } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/build.gradle b/x-pack/plugin/inference/qa/test-service-plugin/build.gradle index 9020589f74a0c..031c7519154b1 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/build.gradle +++ b/x-pack/plugin/inference/qa/test-service-plugin/build.gradle @@ -6,6 +6,7 @@ esplugin { name 'inference-service-test' description 'A mock inference service' classname 'org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin' + extendedPlugins = ['x-pack-inference'] } dependencies { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java new file mode 100644 index 0000000000000..0804685aa2cb0 --- /dev/null +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java @@ -0,0 +1,311 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.mock; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class TestInferenceServiceExtension implements InferenceServiceExtension { + @Override + public List getInferenceServiceFactories() { + return List.of(TestInferenceService::new); + } + + public static class TestInferenceService implements InferenceService { + private static final String NAME = "test_service"; + + public TestInferenceService(InferenceServiceExtension.InferenceServiceFactoryContext context) {} + + @Override + public String name() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return 
TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + + @SuppressWarnings("unchecked") + private static Map getTaskSettingsMap(Map settings) { + Map taskSettingsMap; + // task settings are optional + if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) { + taskSettingsMap = (Map) settings.remove(ModelConfigurations.TASK_SETTINGS); + } else { + taskSettingsMap = Map.of(); + } + + return taskSettingsMap; + } + + @Override + @SuppressWarnings("unchecked") + public TestServiceModel parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platfromArchitectures + ) { + var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + var secretSettings = TestSecretSettings.fromMap(serviceSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); + + return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); + } + + @Override + @SuppressWarnings("unchecked") + public TestServiceModel parsePersistedConfig( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); + var secretSettingsMap = (Map) secrets.remove(ModelSecrets.SECRET_SETTINGS); + + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + var secretSettings = TestSecretSettings.fromMap(secretSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); + + return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); + } + + @Override + public void infer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + switch (model.getConfigurations().getTaskType()) { + case ANY -> 
listener.onResponse(makeResults(input)); + case SPARSE_EMBEDDING -> listener.onResponse(makeResults(input)); + default -> listener.onFailure( + new ElasticsearchStatusException( + TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), + RestStatus.BAD_REQUEST + ) + ); + } + } + + private SparseEmbeddingResults makeResults(List input) { + var embeddings = new ArrayList(); + for (int i = 0; i < input.size(); i++) { + var tokens = new ArrayList(); + for (int j = 0; j < 5; j++) { + tokens.add(new SparseEmbeddingResults.WeightedToken(Integer.toString(j), (float) j)); + } + embeddings.add(new SparseEmbeddingResults.Embedding(tokens, false)); + } + return new SparseEmbeddingResults(embeddings); + } + + @Override + public void start(Model model, ActionListener listener) { + listener.onResponse(true); + } + + @Override + public void close() throws IOException {} + } + + public static class TestServiceModel extends Model { + + public TestServiceModel( + String modelId, + TaskType taskType, + String service, + TestServiceSettings serviceSettings, + TestTaskSettings taskSettings, + TestSecretSettings secretSettings + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); + } + + @Override + public TestServiceSettings getServiceSettings() { + return (TestServiceSettings) super.getServiceSettings(); + } + + @Override + public TestTaskSettings getTaskSettings() { + return (TestTaskSettings) super.getTaskSettings(); + } + + @Override + public TestSecretSettings getSecretSettings() { + return (TestSecretSettings) super.getSecretSettings(); + } + } + + public record TestServiceSettings(String model) implements ServiceSettings { + + static final String NAME = "test_service_settings"; + + public static TestServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String model = (String) map.remove("model"); + + if (model == 
null) { + validationException.addValidationError("missing model"); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestServiceSettings(model); + } + + public TestServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("model", model); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(model); + } + } + + public record TestTaskSettings(Integer temperature) implements TaskSettings { + + static final String NAME = "test_task_settings"; + + public static TestTaskSettings fromMap(Map map) { + Integer temperature = (Integer) map.remove("temperature"); + return new TestTaskSettings(temperature); + } + + public TestTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalVInt(temperature); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (temperature != null) { + builder.field("temperature", temperature); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + } + + public record TestSecretSettings(String apiKey) implements SecretSettings { + 
+ static final String NAME = "test_secret_settings"; + + public static TestSecretSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String apiKey = (String) map.remove("api_key"); + + if (apiKey == null) { + validationException.addValidationError("missing api_key"); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new TestSecretSettings(apiKey); + } + + public TestSecretSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(apiKey); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("api_key", apiKey); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests + } + } +} diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java index 4d8cb18e541ff..0345d7b6e5926 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServicePlugin.java @@ -7,395 +7,34 @@ package org.elasticsearch.xpack.inference.mock; -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.ActionListener; -import 
org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.InferenceResults; -import org.elasticsearch.inference.InferenceService; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; -import java.util.ArrayList; import java.util.List; -import java.util.Map; -import java.util.Set; -public class TestInferenceServicePlugin extends Plugin implements InferenceServicePlugin { +public class TestInferenceServicePlugin extends Plugin { @Override - public List getInferenceServiceFactories() { - return List.of(TestInferenceService::new, TestInferenceServiceClusterService::new); - } - - @Override - public List getInferenceServiceNamedWriteables() { + public List getNamedWriteables() { return List.of( - new NamedWriteableRegistry.Entry(ServiceSettings.class, TestServiceSettings.NAME, TestServiceSettings::new), - new NamedWriteableRegistry.Entry(TaskSettings.class, TestTaskSettings.NAME, TestTaskSettings::new), - new NamedWriteableRegistry.Entry(SecretSettings.class, TestSecretSettings.NAME, TestSecretSettings::new) + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + TestInferenceServiceExtension.TestServiceSettings.NAME, + TestInferenceServiceExtension.TestServiceSettings::new + ), + new 
NamedWriteableRegistry.Entry( + TaskSettings.class, + TestInferenceServiceExtension.TestTaskSettings.NAME, + TestInferenceServiceExtension.TestTaskSettings::new + ), + new NamedWriteableRegistry.Entry( + SecretSettings.class, + TestInferenceServiceExtension.TestSecretSettings.NAME, + TestInferenceServiceExtension.TestSecretSettings::new + ) ); } - - public static class TestInferenceService extends TestInferenceServiceBase { - private static final String NAME = "test_service"; - - public TestInferenceService(InferenceServiceFactoryContext context) { - super(context); - } - - @Override - public String name() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests - } - } - - public static class TestInferenceServiceClusterService extends TestInferenceServiceBase { - private static final String NAME = "test_service_in_cluster_service"; - - public TestInferenceServiceClusterService(InferenceServiceFactoryContext context) { - super(context); - } - - @Override - public boolean isInClusterService() { - return true; - } - - @Override - public String name() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests - } - } - - public abstract static class TestInferenceServiceBase implements InferenceService { - - @SuppressWarnings("unchecked") - private static Map getTaskSettingsMap(Map settings) { - Map taskSettingsMap; - // task settings are optional - if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) { - taskSettingsMap = (Map) settings.remove(ModelConfigurations.TASK_SETTINGS); - } else { - taskSettingsMap = Map.of(); - } - - return taskSettingsMap; - } - - public TestInferenceServiceBase(InferenceServicePlugin.InferenceServiceFactoryContext context) { - - } - - @Override - 
@SuppressWarnings("unchecked") - public TestServiceModel parseRequestConfig( - String modelId, - TaskType taskType, - Map config, - Set platfromArchitectures - ) { - var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); - var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); - var secretSettings = TestSecretSettings.fromMap(serviceSettingsMap); - - var taskSettingsMap = getTaskSettingsMap(config); - var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); - - return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); - } - - @Override - @SuppressWarnings("unchecked") - public TestServiceModel parsePersistedConfig( - String modelId, - TaskType taskType, - Map config, - Map secrets - ) { - var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); - var secretSettingsMap = (Map) secrets.remove(ModelSecrets.SECRET_SETTINGS); - - var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); - var secretSettings = TestSecretSettings.fromMap(secretSettingsMap); - - var taskSettingsMap = getTaskSettingsMap(config); - var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); - - return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); - } - - @Override - public void infer( - Model model, - List input, - Map taskSettings, - ActionListener listener - ) { - switch (model.getConfigurations().getTaskType()) { - case SPARSE_EMBEDDING -> { - var strings = new ArrayList(); - for (int i = 0; i < input.size(); i++) { - strings.add(Integer.toString(i)); - } - - listener.onResponse(new TestResults(strings)); - } - default -> listener.onFailure( - new ElasticsearchStatusException( - TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), - RestStatus.BAD_REQUEST - ) - ); - } - - } - - @Override - public void start(Model model, ActionListener listener) { - 
listener.onResponse(true); - } - - @Override - public void close() throws IOException {} - } - - public static class TestServiceModel extends Model { - - public TestServiceModel( - String modelId, - TaskType taskType, - String service, - TestServiceSettings serviceSettings, - TestTaskSettings taskSettings, - TestSecretSettings secretSettings - ) { - super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings)); - } - - @Override - public TestServiceSettings getServiceSettings() { - return (TestServiceSettings) super.getServiceSettings(); - } - - @Override - public TestTaskSettings getTaskSettings() { - return (TestTaskSettings) super.getTaskSettings(); - } - - @Override - public TestSecretSettings getSecretSettings() { - return (TestSecretSettings) super.getSecretSettings(); - } - } - - public record TestServiceSettings(String model) implements ServiceSettings { - - private static final String NAME = "test_service_settings"; - - public static TestServiceSettings fromMap(Map map) { - ValidationException validationException = new ValidationException(); - - String model = (String) map.remove("model"); - - if (model == null) { - validationException.addValidationError("missing model"); - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return new TestServiceSettings(model); - } - - public TestServiceSettings(StreamInput in) throws IOException { - this(in.readString()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("model", model); - builder.endObject(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests - } - - @Override - public 
void writeTo(StreamOutput out) throws IOException { - out.writeString(model); - } - } - - public record TestTaskSettings(Integer temperature) implements TaskSettings { - - private static final String NAME = "test_task_settings"; - - public static TestTaskSettings fromMap(Map map) { - Integer temperature = (Integer) map.remove("temperature"); - return new TestTaskSettings(temperature); - } - - public TestTaskSettings(StreamInput in) throws IOException { - this(in.readOptionalVInt()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalVInt(temperature); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (temperature != null) { - builder.field("temperature", temperature); - } - builder.endObject(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests - } - } - - public record TestSecretSettings(String apiKey) implements SecretSettings { - - private static final String NAME = "test_secret_settings"; - - public static TestSecretSettings fromMap(Map map) { - ValidationException validationException = new ValidationException(); - - String apiKey = (String) map.remove("api_key"); - - if (apiKey == null) { - validationException.addValidationError("missing api_key"); - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return new TestSecretSettings(apiKey); - } - - public TestSecretSettings(StreamInput in) throws IOException { - this(in.readString()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(apiKey); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - 
builder.startObject(); - builder.field("api_key", apiKey); - builder.endObject(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests - } - } - - private static class TestResults implements InferenceServiceResults, InferenceResults { - - private static final String RESULTS_FIELD = "result"; - private List result; - - TestResults(List result) { - this.result = result; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(RESULTS_FIELD, result); - return builder; - } - - @Override - public String getWriteableName() { - return "test_result"; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeStringCollection(result); - } - - @Override - public String getResultsField() { - return RESULTS_FIELD; - } - - @Override - public List transformToLegacyFormat() { - return List.of(this); - } - - @Override - public Map asMap() { - return Map.of("result", result); - } - - @Override - public Map asMap(String outputField) { - return Map.of(outputField, result); - } - - @Override - public Object predictedValue() { - return result; - } - } } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension b/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension new file mode 100644 index 0000000000000..019a6dad7be85 --- /dev/null +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/resources/META-INF/services/org.elasticsearch.inference.InferenceServiceExtension @@ -0,0 +1 @@ +org.elasticsearch.xpack.inference.mock.TestInferenceServiceExtension diff --git 
a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 520a4cc5c0526..3c0db8228409e 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -10,12 +10,12 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -106,7 +106,7 @@ public void testGetModel() throws Exception { UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config(), modelHolder.get().secrets()); assertEquals(model.getConfigurations().getService(), unparsedModel.service()); - var elserService = new ElserMlNodeService(new InferenceServicePlugin.InferenceServiceFactoryContext(mock(Client.class))); + var elserService = new ElserMlNodeService(new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class))); ElserMlNodeModel roundTripModel = elserService.parsePersistedConfig( unparsedModel.modelId(), unparsedModel.taskType(), diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 
801b0a1cd755c..87f623bdfe5cc 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -21,7 +21,9 @@ exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; exports org.elasticsearch.xpack.inference.rest; - exports org.elasticsearch.xpack.inference.results; exports org.elasticsearch.xpack.inference.services; + exports org.elasticsearch.xpack.inference.services.elser; + exports org.elasticsearch.xpack.inference.services.huggingface.elser; + exports org.elasticsearch.xpack.inference.services.openai; exports org.elasticsearch.xpack.inference; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 0ba7ca1d49150..092b1200fb80a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -14,9 +14,9 @@ import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; -import org.elasticsearch.xpack.inference.results.LegacyTextEmbeddingResults; -import org.elasticsearch.xpack.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; import 
org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 476f19a286d53..7e7f2c9e05680 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -21,8 +21,10 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.InferenceServiceRegistry; import org.elasticsearch.plugins.ActionPlugin; -import org.elasticsearch.plugins.InferenceServicePlugin; +import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; @@ -30,10 +32,10 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.inference.action.DeleteInferenceModelAction; -import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; -import org.elasticsearch.xpack.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceModelAction; import 
org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; @@ -53,13 +55,14 @@ import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; +import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -public class InferencePlugin extends Plugin implements ActionPlugin, InferenceServicePlugin, SystemIndexPlugin { +public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin { public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; @@ -69,6 +72,9 @@ public class InferencePlugin extends Plugin implements ActionPlugin, InferenceSe private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); + private final SetOnce inferenceServiceRegistry = new SetOnce<>(); + private List inferenceServiceExtensions; + public InferencePlugin(Settings settings) { this.settings = settings; } @@ -117,7 +123,39 @@ public Collection createComponents(PluginServices services) { httpFactory.set(httpRequestSenderFactory); ModelRegistry modelRegistry = new ModelRegistry(services.client()); - return List.of(modelRegistry); + + if (inferenceServiceExtensions == null) { + inferenceServiceExtensions = new ArrayList<>(); + } + var inferenceServices = new ArrayList<>(inferenceServiceExtensions); + inferenceServices.add(this::getInferenceServiceFactories); + + var factoryContext = new InferenceServiceExtension.InferenceServiceFactoryContext(services.client()); + var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); + registry.init(services.client()); + inferenceServiceRegistry.set(registry); + + return 
List.of(modelRegistry, registry); + } + + @Override + public void loadExtensions(ExtensionLoader loader) { + inferenceServiceExtensions = loader.loadExtensions(InferenceServiceExtension.class); + } + + public List getInferenceServiceFactories() { + return List.of( + ElserMlNodeService::new, + context -> new HuggingFaceElserService(httpFactory, serviceComponents), + context -> new OpenAiService(httpFactory, serviceComponents) + ); + } + + @Override + public List getNamedWriteables() { + var entries = new ArrayList(); + entries.addAll(InferenceNamedWriteablesProvider.getNamedWriteables()); + return entries; } @Override @@ -182,20 +220,6 @@ public String getFeatureDescription() { return "Inference plugin for managing inference services and inference"; } - @Override - public List getInferenceServiceFactories() { - return List.of( - ElserMlNodeService::new, - context -> new HuggingFaceElserService(httpFactory, serviceComponents), - context -> new OpenAiService(httpFactory, serviceComponents) - ); - } - - @Override - public List getInferenceServiceNamedWriteables() { - return InferenceNamedWriteablesProvider.getNamedWriteables(); - } - @Override public void close() { var serviceComponentsRef = serviceComponents.get(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java index 4305ff5a7b631..88a364d1de8fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import 
org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportDeleteInferenceModelAction extends AcknowledgedTransportMasterNodeAction { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index 90fe9667c33aa..ddc70f4e8d846 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.inference.UnparsedModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 7718739420cf1..095ffa2635234 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.inference.UnparsedModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; @@ -57,7 +58,8 @@ protected 
void doExecute(Task task, InferenceAction.Request request, ActionListe return; } - if (request.getTaskType() != unparsedModel.taskType()) { + if (request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false) { + // not the wildcard task type and not the model task type listener.onFailure( new ElasticsearchStatusException( "Incompatible task_type, the requested type [{}] does not match the model type [{}]", diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index 569d4e023928b..f6bb90d701a4a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.inference.InferencePlugin; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 566ca9ff1351f..fab22dce889a5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -13,8 +13,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.results.SparseEmbeddingResults; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index 60b568678987d..c301ab2194415 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -13,8 +13,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.results.TextEmbeddingResults; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java index 74050d4b32e89..184b310a9f829 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.inference.action.DeleteInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index f57c800bd5bdc..98f0c1c1aeeb1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index 9d7a0d331b2b3..beecf75da38ab 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -11,7 +11,7 @@ import 
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java index cf0eb857feba9..1199cf5688fcc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.inference.action.PutInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index f1fab447ec757..f9cc74550469c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -14,17 +14,17 @@ import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceExtension; import 
org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; -import org.elasticsearch.xpack.inference.results.SparseEmbeddingResults; import java.io.IOException; import java.util.List; @@ -52,7 +52,7 @@ public class ElserMlNodeService implements InferenceService { private final OriginSettingClient client; - public ElserMlNodeService(InferenceServicePlugin.InferenceServiceFactoryContext context) { + public ElserMlNodeService(InferenceServiceExtension.InferenceServiceFactoryContext context) { this.client = new OriginSettingClient(context.client(), ClientHelper.INFERENCE_ORIGIN); } @@ -160,7 +160,7 @@ public void start(Model model, ActionListener listener) { public void infer(Model model, List input, Map taskSettings, ActionListener listener) { // No task settings to override with requestTaskSettings - if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { + if (TaskType.SPARSE_EMBEDDING.isAnyOrSame(model.getConfigurations().getTaskType()) == false) { listener.onFailure( new ElasticsearchStatusException( TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java index 
0b30dc9021038..dd422753faddb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; public class GetInferenceModelRequestTests extends AbstractWireSerializingTestCase { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelResponseTests.java index 472e4123c52e6..72f6f43126f7c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelResponseTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; import org.elasticsearch.xpack.inference.ModelConfigurationsTests; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index d263cf8c776ea..aa540694ba564 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java index 515b6c268d0af..759411cec1212 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; @@ -25,7 +26,7 @@ import static org.elasticsearch.TransportVersions.INFERENCE_SERVICE_RESULTS_ADDED; import static org.elasticsearch.TransportVersions.ML_INFERENCE_OPENAI_ADDED; import static org.elasticsearch.TransportVersions.ML_INFERENCE_TASK_SETTINGS_OPTIONAL_ADDED; -import static org.elasticsearch.xpack.inference.action.InferenceAction.Response.transformToServiceResults; +import static org.elasticsearch.xpack.core.inference.action.InferenceAction.Response.transformToServiceResults; public class InferenceActionResponseTests extends 
AbstractBWCWireSerializationTestCase { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java index 9aefea9a942db..bdbca6426b601 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; public class PutInferenceModelRequestTests extends AbstractWireSerializingTestCase { @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java index 0a2ad4699cca8..89bd0247a9ccf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider; import org.elasticsearch.xpack.inference.ModelConfigurationsTests; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java index ce94bfceed4fb..606e0cc83f451 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentEOFException; import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; import java.io.IOException; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index a3ec162b05ec8..56d8171640b53 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -10,8 +10,8 @@ import org.apache.http.HttpResponse; import org.elasticsearch.common.ParsingException; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import 
org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.results.TextEmbeddingResults; import java.io.IOException; import java.nio.charset.StandardCharsets; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java index 6553f1e7f8ae3..605411343533f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/LegacyTextEmbeddingResultsTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.LegacyTextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java index 9ab33ef777445..0a8bfd20caaf1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import java.io.IOException; import java.util.ArrayList; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index fabb6c3de0fbc..71d14e09872fd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java index 56a592a490712..4cce176e78c12 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java @@ -9,10 +9,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -223,7 +223,7 @@ public void testParseRequestConfig_DefaultModel() { } private ElserMlNodeService createService(Client client) { - var context = new 
InferenceServicePlugin.InferenceServiceFactoryContext(client); + var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElserMlNodeService(context); } } diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/build.gradle b/x-pack/plugin/ml/qa/ml-inference-service-tests/build.gradle new file mode 100644 index 0000000000000..83226acb383c7 --- /dev/null +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/build.gradle @@ -0,0 +1,12 @@ +apply plugin: 'elasticsearch.internal-java-rest-test' + +dependencies { + javaRestTestImplementation(testArtifact(project(xpackModule('core')))) + javaRestTestImplementation(testArtifact(project(xpackModule('ml')))) + javaRestTestImplementation project(path: xpackModule('inference')) + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') +} + +tasks.named("javaRestTest").configure { + usesDefaultDistribution() +} diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java new file mode 100644 index 0000000000000..c4c3ee016be0e --- /dev/null +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java @@ -0,0 +1,309 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.ml.utils.MapHelper; +import org.junit.ClassRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; + +public class CoordinatedInferenceIngestIT extends ESRestTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("xpack.security.enabled", "true") + .plugin("org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin") + .user("x_pack_rest_user", "x-pack-test-password") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @SuppressWarnings("unchecked") + public void testIngestWithMultipleModelTypes() throws IOException { + // Create an inference service model, dfa model and pytorch model + var inferenceServiceModelId = "is_model"; + var boostedTreeModelId = 
"boosted_tree_model"; + var pyTorchModelId = "pytorch_model"; + + putInferenceServiceModel(inferenceServiceModelId, TaskType.SPARSE_EMBEDDING); + putBoostedTreeRegressionModel(boostedTreeModelId); + putPyTorchModel(pyTorchModelId); + putPyTorchModelDefinition(pyTorchModelId); + putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); + startDeployment(pyTorchModelId); + + String docs = """ + [ + { + "_source": { + "title": "my", + "body": "these are" + } + }, + { + "_source": { + "title": "are", + "body": "my words" + } + } + ] + """; + + { + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinition(inferenceServiceModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + assertEquals(inferenceServiceModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(0))); + var sparseEmbedding = (Map) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(0)); + assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + assertEquals(inferenceServiceModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(1))); + sparseEmbedding = (Map) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(1)); + assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + } + + { + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinition(pyTorchModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + assertEquals(pyTorchModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(0))); + List> results = (List>) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(0)); + assertThat(results.get(0), contains(1.0, 1.0)); + assertEquals(pyTorchModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(1))); + results = (List>) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(1)); + assertThat(results.get(0), contains(1.0, 1.0)); + } + + String boostedTreeDocs = 
Strings.format(""" + [ + { + "_source": %s + }, + { + "_source": %s + } + ] + """, ExampleModels.randomBoostedTreeModelDoc(), ExampleModels.randomBoostedTreeModelDoc()); + { + var responseMap = simulatePipeline( + ExampleModels.boostedTreeRegressionModelPipelineDefinition(boostedTreeModelId), + boostedTreeDocs + ); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + assertEquals(boostedTreeModelId, MapHelper.dig("doc._source.ml.regression.model_id", simulatedDocs.get(0))); + assertNotNull(MapHelper.dig("doc._source.ml.regression.predicted_value", simulatedDocs.get(0))); + assertEquals(boostedTreeModelId, MapHelper.dig("doc._source.ml.regression.model_id", simulatedDocs.get(1))); + assertNotNull(MapHelper.dig("doc._source.ml.regression.predicted_value", simulatedDocs.get(1))); + } + } + + @SuppressWarnings("unchecked") + public void testPipelineConfiguredWithFieldMap() throws IOException { + // Create an inference service model, dfa model and pytorch model + var inferenceServiceModelId = "is_model"; + var boostedTreeModelId = "boosted_tree_model"; + var pyTorchModelId = "pytorch_model"; + + putInferenceServiceModel(inferenceServiceModelId, TaskType.SPARSE_EMBEDDING); + putBoostedTreeRegressionModel(boostedTreeModelId); + putPyTorchModel(pyTorchModelId); + putPyTorchModelDefinition(pyTorchModelId); + putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); + startDeployment(pyTorchModelId); + + String docs = """ + [ + { + "_source": { + "body": "these are" + } + }, + { + "_source": { + "body": "my words" + } + } + ] + """; + + { + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinitionWithFieldMap(pyTorchModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + assertEquals(pyTorchModelId, MapHelper.dig("doc._source.ml.inference.model_id", simulatedDocs.get(0))); + List> results = (List>) MapHelper.dig( + 
"doc._source.ml.inference.predicted_value", + simulatedDocs.get(0) + ); + assertThat(results.get(0), contains(1.0, 1.0)); + assertEquals(pyTorchModelId, MapHelper.dig("doc._source.ml.inference.model_id", simulatedDocs.get(1))); + results = (List>) MapHelper.dig("doc._source.ml.inference.predicted_value", simulatedDocs.get(1)); + assertThat(results.get(0), contains(1.0, 1.0)); + } + + { + // Inference service models cannot be configured with the field map + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinitionWithFieldMap(inferenceServiceModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + var errorMsg = (String) MapHelper.dig("error.reason", simulatedDocs.get(0)); + assertThat(errorMsg, containsString("[is_model] is configured for the _inference API and does not accept documents as input")); + assertThat(simulatedDocs, hasSize(2)); + } + + } + + @SuppressWarnings("unchecked") + public void testWithUndeployedPyTorchModel() throws IOException { + var pyTorchModelId = "test-undeployed"; + + putPyTorchModel(pyTorchModelId); + putPyTorchModelDefinition(pyTorchModelId); + putPyTorchModelVocabulary(List.of("these", "are", "my", "words"), pyTorchModelId); + + String docs = """ + [ + { + "_source": { + "title": "my", + "body": "these are" + } + }, + { + "_source": { + "title": "are", + "body": "my words" + } + } + ] + """; + + { + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinition(pyTorchModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + var errorMsg = (String) MapHelper.dig("error.reason", simulatedDocs.get(0)); + assertEquals("[" + pyTorchModelId + "] is not an inference service model or a deployed ml model", errorMsg); + } + + { + var responseMap = simulatePipeline(ExampleModels.nlpModelPipelineDefinitionWithFieldMap(pyTorchModelId), docs); + var simulatedDocs = (List>) responseMap.get("docs"); + assertThat(simulatedDocs, hasSize(2)); + var 
errorMsg = (String) MapHelper.dig("error.reason", simulatedDocs.get(0)); + assertEquals( + "Model [" + pyTorchModelId + "] must be deployed to use. Please deploy with the start trained model deployment API.", + errorMsg + ); + } + } + + private Map putInferenceServiceModel(String modelId, TaskType taskType) throws IOException { + String endpoint = org.elasticsearch.common.Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("PUT", endpoint); + var modelConfig = ExampleModels.mockServiceModelConfig(); + request.setJsonEntity(modelConfig); + var response = client().performRequest(request); + return entityAsMap(response); + } + + private void putPyTorchModel(String modelId) throws IOException { + Request request = new Request("PUT", "_ml/trained_models/" + modelId); + var modelConfiguration = ExampleModels.pytorchPassThroughModelConfig(); + request.setJsonEntity(modelConfiguration); + client().performRequest(request); + } + + protected void putPyTorchModelVocabulary(List vocabulary, String modelId) throws IOException { + List vocabularyWithPad = new ArrayList<>(); + vocabularyWithPad.add("[PAD]"); + vocabularyWithPad.add("[UNK]"); + vocabularyWithPad.addAll(vocabulary); + String quotedWords = vocabularyWithPad.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(",")); + + Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/vocabulary"); + request.setJsonEntity(Strings.format(""" + { "vocabulary": [%s] } + """, quotedWords)); + client().performRequest(request); + } + + protected Map simulatePipeline(String pipelineDef, String docs) throws IOException { + String simulate = Strings.format(""" + { + "pipeline": %s, + "docs": %s + }""", pipelineDef, docs); + + Request request = new Request("POST", "_ingest/pipeline/_simulate?error_trace=true"); + request.setJsonEntity(simulate); + return entityAsMap(client().performRequest(request)); + } + + protected void putPyTorchModelDefinition(String modelId) throws 
IOException { + Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0"); + String body = Strings.format( + """ + {"total_definition_length":%s,"definition": "%s","total_parts": 1}""", + ExampleModels.RAW_PYTORCH_MODEL_SIZE, + ExampleModels.BASE_64_ENCODED_PYTORCH_MODEL + ); + request.setJsonEntity(body); + client().performRequest(request); + } + + protected void startDeployment(String modelId) throws IOException { + String endPoint = "/_ml/trained_models/" + + modelId + + "/deployment/_start?timeout=40s&wait_for=started&threads_per_allocation=1&number_of_allocations=1"; + + Request request = new Request("POST", endPoint); + client().performRequest(request); + } + + private void putBoostedTreeRegressionModel(String modelId) throws IOException { + Request request = new Request("PUT", "_ml/trained_models/" + modelId); + var modelConfiguration = ExampleModels.boostedTreeRegressionModel(); + request.setJsonEntity(modelConfiguration); + client().performRequest(request); + } + + public Map getModel(String modelId, TaskType taskType) throws IOException { + var endpoint = org.elasticsearch.common.Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("GET", endpoint); + var reponse = client().performRequest(request); + return entityAsMap(reponse); + } +} diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExampleModels.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExampleModels.java new file mode 100644 index 0000000000000..f9f4d6bf474e9 --- /dev/null +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExampleModels.java @@ -0,0 +1,305 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Strings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Base64; +import java.util.Map; + +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; + +public class ExampleModels { + + static final String BASE_64_ENCODED_PYTORCH_MODEL = + "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwp" + + "TdXBlclNpbXBsZQpxACmBfShYCAAAAHRyYWluaW5ncQGIdWJxAi5QSwcIXOpBBDQAAAA0AAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAA" + + "AAAAAdAEEAc2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQj0AWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW" + + "lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWnWOMWvDMBCF9/yKI5MMrnHTQsHgjt2aJdlCEIp9SgWSTpykFvfXV1htaYds0nfv473Jqhjh" + + "kAPywbhgUbzSnC02wwZAyqBYOUzIUUoY4XRe6SVr/Q8lVsYbf4UBLkS2kBk1aOIPxbOIaPVQtEQ8vUnZ/WlrSxTA+JCTNHMc4Ig+Ele" + + "s+Jod+iR3N/jDDf74wxu4e/5+DmtE9mUyhdgFNq7bZ3ekehbruC6aTxS/c1rom6Z698WrEfIYxcn4JGTftLA7tzCnJeD41IJVC+U07k" + + "umUHw3E47Vqh+xnULeFisYLx064mV8UTZibWFMmX0p23wBUEsHCE0EGH3yAAAAlwEAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJ" + + "wA5AHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCNQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpa" + + "WlpaWlpaWlpaWlpaWlpaWlpaWlpaWrWST0+DMBiHW6bOod/BGS94kKpo2Mwyox5x3pbgiXSAFtdR/nQu3IwHiZ9oX88CaeGu9tL0efq" + + "+v8P7fmiGA1wgTgoIcECZQqe6vmYD6G4hAJOcB1E8NazTm+ELyzY4C3Q0z8MsRwF+j4JlQUPEEo5wjH0WB9hCNFqgpOCExZY5QnnEw7" + + "ME+0v8GuaIs8wnKI7RigVrKkBzm0lh2OdjkeHllG28f066vK6SfEypF60S+vuYt4gjj2fYr/uPrSvRv356TepfJ9iWJRN0OaELQSZN3" + + 
"FRPNbcP1PTSntMr0x0HzLZQjPYIEo3UaFeiISRKH0Mil+BE/dyT1m7tCBLwVO1MX4DK3bbuTlXuy8r71j5Aoho66udAoseOnrdVzx28" + + "UFW6ROuO/lT6QKKyo79VU54emj9QSwcInsUTEDMBAAAFAwAAUEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAZAAYAc2ltcGxlbW9kZWw" + + "vY29uc3RhbnRzLnBrbEZCAgBaWoACKS5QSwcIbS8JVwQAAAAEAAAAUEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAATADsAc2ltcGxlbW" + + "9kZWwvdmVyc2lvbkZCNwBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaMwpQSwcI0" + + "Z5nVQIAAAACAAAAUEsBAgAAAAAICAAAAAAAAFzqQQQ0AAAANAAAABQAAAAAAAAAAAAAAAAAAAAAAHNpbXBsZW1vZGVsL2RhdGEucGts" + + "UEsBAgAAFAAICAgAAAAAAE0EGH3yAAAAlwEAAB0AAAAAAAAAAAAAAAAAhAAAAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5UEs" + + "BAgAAFAAICAgAAAAAAJ7FExAzAQAABQMAACcAAAAAAAAAAAAAAAAAAgIAAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYn" + + "VnX3BrbFBLAQIAAAAACAgAAAAAAABtLwlXBAAAAAQAAAAZAAAAAAAAAAAAAAAAAMMDAABzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsU" + + "EsBAgAAAAAICAAAAAAAANGeZ1UCAAAAAgAAABMAAAAAAAAAAAAAAAAAFAQAAHNpbXBsZW1vZGVsL3ZlcnNpb25QSwYGLAAAAAAAAAAe" + + "Ay0AAAAAAAAAAAAFAAAAAAAAAAUAAAAAAAAAagEAAAAAAACSBAAAAAAAAFBLBgcAAAAA/AUAAAAAAAABAAAAUEsFBgAAAAAFAAUAagE" + + "AAJIEAAAAAA=="; + static final long RAW_PYTORCH_MODEL_SIZE; // size of the model before base64 encoding + static { + RAW_PYTORCH_MODEL_SIZE = Base64.getDecoder().decode(BASE_64_ENCODED_PYTORCH_MODEL).length; + } + + static String pytorchPassThroughModelConfig() { + return """ + { + "description": "simple model for testing", + "model_type": "pytorch", + "inference_config": { + "pass_through": { + "tokenization": { + "bert": { + "with_special_tokens": false + } + } + } + } + } + """; + } + + static String mockServiceModelConfig() { + return org.elasticsearch.common.Strings.format(""" + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + "temperature": 3 + } + } + """); + } + + private static final String REGRESSION_DEFINITION = """ + { "preprocessors": [ + { + "one_hot_encoding": { + "field": "col1", + "hot_map": 
{ + "male": "col1_male", + "female": "col1_female" + } + } + }, + { + "target_mean_encoding": { + "field": "col2", + "feature_name": "col2_encoded", + "target_map": { + "S": 5.0, + "M": 10.0, + "L": 20 + }, + "default_value": 5.0 + } + }, + { + "frequency_encoding": { + "field": "col3", + "feature_name": "col3_encoded", + "frequency_map": { + "none": 0.75, + "true": 0.10, + "false": 0.15 + } + } + } + ], + "trained_model": { + "ensemble": { + "feature_names": [ + "col1_male", + "col1_female", + "col2_encoded", + "col3_encoded", + "col4" + ], + "aggregate_output": { + "weighted_sum": { + "weights": [ + 0.5, + 0.5 + ] + } + }, + "target_type": "regression", + "trained_models": [ + { + "tree": { + "feature_names": [ + "col1_male", + "col1_female", + "col4" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12.0, + "threshold": 10.0, + "decision_type": "lte", + "number_samples": 300, + "default_left": true, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "number_samples": 100, + "leaf_value": 1 + }, + { + "node_index": 2, + "number_samples": 200, + "leaf_value": 2 + } + ], + "target_type": "regression" + } + }, + { + "tree": { + "feature_names": [ + "col2_encoded", + "col3_encoded", + "col4" + ], + "tree_structure": [ + { + "node_index": 0, + "split_feature": 0, + "split_gain": 12.0, + "threshold": 10.0, + "decision_type": "lte", + "default_left": true, + "number_samples": 150, + "left_child": 1, + "right_child": 2 + }, + { + "node_index": 1, + "number_samples": 50, + "leaf_value": 1 + }, + { + "node_index": 2, + "number_samples": 100, + "leaf_value": 2 + } + ], + "target_type": "regression" + } + } + ] + } + } + }"""; + + public static String boostedTreeRegressionModel() { + return Strings.format(""" + { + "input": { + "field_names": [ + "col1", + "col2", + "col3", + "col4" + ] + }, + "description": "test model for regression", + "inference_config": { + "regression": {} + }, + "definition": %s + }""", 
REGRESSION_DEFINITION); + } + + public static String nlpModelPipelineDefinition(String modelId) { + return Strings.format(""" + { + "processors": [ + { + "inference": { + "model_id": "%s", + "input_output": { + "input_field": "body", + "output_field": "ml.body" + } + } + } + ] + }""", modelId); + } + + public static String nlpModelPipelineDefinitionWithFieldMap(String modelId) { + return Strings.format(""" + { + "processors": [ + { + "inference": { + "model_id": "%s", + "field_map": { + "body": "input" + } + } + } + ] + }""", modelId); + } + + public static String boostedTreeRegressionModelPipelineDefinition(String modelId) { + return Strings.format(""" + { + "processors": [ + { + "inference": { + "target_field": "ml.regression", + "model_id": "%s", + "inference_config": { + "regression": {} + }, + "field_map": { + "col1": "col1", + "col2": "col2", + "col3": "col3", + "col4": "col4" + } + } + } + ] + }""", modelId); + } + + public static String randomBoostedTreeModelDoc() throws IOException { + Map values = Map.of( + "col1", + randomFrom("female", "male"), + "col2", + randomFrom("S", "M", "L", "XL"), + "col3", + randomFrom("true", "false", "none", "other"), + "col4", + randomIntBetween(0, 10) + ); + + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(values)) { + return XContentHelper.convertToJson(BytesReference.bytes(xContentBuilder), false, XContentType.JSON); + } + } + + private ExampleModels() {} +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIT.java index c8c580d2933c1..24bdbe23eb5ab 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIT.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIT.java @@ -283,7 +283,7 @@ private void putModelAlias(String modelAlias, String newModel) throws IOExceptio } }"""; - private static final String REGRESSION_CONFIG = Strings.format(""" + public static final String REGRESSION_CONFIG = Strings.format(""" { "input": { "field_names": [ @@ -325,5 +325,4 @@ private void putModel(String modelId, String modelConfiguration) throws IOExcept request.setJsonEntity(modelConfiguration); client().performRequest(request); } - } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java index 8e425ea071879..82597e16837c6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java @@ -288,7 +288,7 @@ public void testSearchWithMissingModel() { String indexName = modelId + "-index"; var e = expectThrows(ResponseException.class, () -> textEmbeddingSearch(indexName, "the machine is leaking", modelId, "embedding")); - assertThat(e.getMessage(), containsString("Could not find trained model [missing-model]")); + assertThat(e.getMessage(), containsString("[missing-model] is not an inference service model or a deployed ml model")); } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java index dbf489e8abf23..6075391326509 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextExpansionQueryIT.java @@ -262,7 +262,7 @@ public void testSearchWithMissingModel() throws IOException { String modelId = "missing-model"; String indexName = modelId + "-index"; var e = expectThrows(ResponseException.class, () -> textExpansionSearch(indexName, "the machine is leaking", modelId, "ml.tokens")); - assertThat(e.getMessage(), containsString("Could not find trained model [missing-model]")); + assertThat(e.getMessage(), containsString("[missing-model] is not an inference service model or a deployed ml model")); } protected Response textExpansionSearch(String index, String modelText, String modelId, String fieldName) throws IOException { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 3320a51009257..db23e7796f862 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -95,6 +95,7 @@ import org.elasticsearch.xpack.core.ml.action.CancelJobModelSnapshotUpgradeAction; import org.elasticsearch.xpack.core.ml.action.ClearDeploymentCacheAction; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -197,6 +198,7 @@ import org.elasticsearch.xpack.ml.action.TransportCancelJobModelSnapshotUpgradeAction; import 
org.elasticsearch.xpack.ml.action.TransportClearDeploymentCacheAction; import org.elasticsearch.xpack.ml.action.TransportCloseJobAction; +import org.elasticsearch.xpack.ml.action.TransportCoordinatedInferenceAction; import org.elasticsearch.xpack.ml.action.TransportCreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.ml.action.TransportDeleteCalendarAction; import org.elasticsearch.xpack.ml.action.TransportDeleteCalendarEventAction; @@ -1573,6 +1575,7 @@ public List getRestHandlers( TransportUpdateTrainedModelAssignmentStateAction.class ) ); + actionHandlers.add(new ActionHandler<>(CoordinatedInferenceAction.INSTANCE, TransportCoordinatedInferenceAction.class)); } } return actionHandlers; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java new file mode 100644 index 0000000000000..d90c9ec807495 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -0,0 +1,188 @@ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; +import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; +import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils; + +import java.util.ArrayList; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportCoordinatedInferenceAction extends HandledTransportAction< + CoordinatedInferenceAction.Request, + InferModelAction.Response> { + + private final Client client; + private final ClusterService clusterService; + + @Inject + public 
TransportCoordinatedInferenceAction( + TransportService transportService, + ActionFilters actionFilters, + Client client, + ClusterService clusterService + ) { + super( + CoordinatedInferenceAction.NAME, + transportService, + actionFilters, + CoordinatedInferenceAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.client = client; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, CoordinatedInferenceAction.Request request, ActionListener listener) { + if (request.getRequestModelType() == CoordinatedInferenceAction.Request.RequestModelType.NLP_MODEL) { + // must be an inference service model or ml hosted model + forNlp(request, listener); + } else if (request.hasObjects()) { + // Inference service models do not accept a document map + // If this fails check if the model is an inference service + // model and error accordingly + doInClusterModel(request, wrapCheckForServiceModelOnMissing(request.getModelId(), listener)); + } else { + forNlp(request, listener); + } + } + + private void forNlp(CoordinatedInferenceAction.Request request, ActionListener listener) { + var clusterState = clusterService.state(); + var assignments = TrainedModelAssignmentUtils.modelAssignments(request.getModelId(), clusterState); + if (assignments == null || assignments.isEmpty()) { + doInferenceServiceModel( + request, + ActionListener.wrap( + listener::onResponse, + e -> replaceErrorOnMissing( + e, + () -> new ElasticsearchStatusException( + "[" + request.getModelId() + "] is not an inference service model or a deployed ml model", + RestStatus.NOT_FOUND + ), + listener + ) + ) + ); + } else { + doInClusterModel(request, listener); + } + } + + private void doInferenceServiceModel(CoordinatedInferenceAction.Request request, ActionListener listener) { + executeAsyncWithOrigin( + client, + INFERENCE_ORIGIN, + InferenceAction.INSTANCE, + new InferenceAction.Request(TaskType.ANY, request.getModelId(), request.getInputs(), 
request.getTaskSettings()), + ActionListener.wrap(r -> listener.onResponse(translateInferenceServiceResponse(r.getResults())), listener::onFailure) + ); + } + + private void doInClusterModel(CoordinatedInferenceAction.Request request, ActionListener listener) { + var inferModelRequest = translateRequest(request); + executeAsyncWithOrigin(client, ML_ORIGIN, InferModelAction.INSTANCE, inferModelRequest, listener); + } + + static InferModelAction.Request translateRequest(CoordinatedInferenceAction.Request request) { + InferenceConfigUpdate inferenceConfigUpdate = request.getInferenceConfigUpdate() == null + ? EmptyConfigUpdate.INSTANCE + : request.getInferenceConfigUpdate(); + + var inferModelRequest = request.hasObjects() + ? InferModelAction.Request.forIngestDocs( + request.getModelId(), + request.getObjectsToInfer(), + inferenceConfigUpdate, + request.getPreviouslyLicensed(), + request.getInferenceTimeout() + ) + : InferModelAction.Request.forTextInput( + request.getModelId(), + inferenceConfigUpdate, + request.getInputs(), + request.getPreviouslyLicensed(), + request.getInferenceTimeout() + ); + inferModelRequest.setPrefixType(request.getPrefixType()); + inferModelRequest.setHighPriority(request.getHighPriority()); + return inferModelRequest; + } + + private ActionListener wrapCheckForServiceModelOnMissing( + String modelId, + ActionListener listener + ) { + return ActionListener.wrap(listener::onResponse, originalError -> { + if (ExceptionsHelper.unwrapCause(originalError) instanceof ResourceNotFoundException) { + executeAsyncWithOrigin( + client, + INFERENCE_ORIGIN, + GetInferenceModelAction.INSTANCE, + new GetInferenceModelAction.Request(modelId, TaskType.ANY), + ActionListener.wrap( + model -> listener.onFailure( + new ElasticsearchStatusException( + "[" + modelId + "] is configured for the _inference API and does not accept documents as input", + RestStatus.BAD_REQUEST + ) + ), + e -> listener.onFailure(originalError) + ) + ); + } else { + 
listener.onFailure(originalError); + } + }); + } + + private void replaceErrorOnMissing( + Exception originalError, + Supplier replaceOnMissing, + ActionListener listener + ) { + if (ExceptionsHelper.unwrapCause(originalError) instanceof ResourceNotFoundException) { + listener.onFailure(replaceOnMissing.get()); + } else { + listener.onFailure(originalError); + } + } + + static InferModelAction.Response translateInferenceServiceResponse(InferenceServiceResults inferenceResults) { + var legacyResults = new ArrayList(inferenceResults.transformToLegacyFormat()); + return new InferModelAction.Response(legacyResults, null, false); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java index 257c944c08605..3640d8dcb2808 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java @@ -7,10 +7,16 @@ package org.elasticsearch.xpack.ml.inference.assignment; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfoUpdate; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; + +import java.util.List; +import java.util.Optional; public class TrainedModelAssignmentUtils { public static final String NODES_CHANGED_REASON = "nodes changed"; @@ -24,5 +30,22 @@ public static RoutingInfo createShuttingDownRoute(RoutingInfo existingRoute) 
{ return routeUpdate.apply(existingRoute); } + public static List modelAssignments(String modelId, ClusterState state) { + String concreteModelId = Optional.ofNullable(ModelAliasMetadata.fromState(state).getModelId(modelId)).orElse(modelId); + + List assignments; + + TrainedModelAssignmentMetadata trainedModelAssignmentMetadata = TrainedModelAssignmentMetadata.fromState(state); + TrainedModelAssignment assignment = trainedModelAssignmentMetadata.getDeploymentAssignment(concreteModelId); + if (assignment != null) { + assignments = List.of(assignment); + } else { + // look up by model + assignments = trainedModelAssignmentMetadata.getDeploymentsUsingModel(concreteModelId); + } + + return assignments; + } + private TrainedModelAssignmentUtils() {} } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java index e600ddd42107f..470605dcb2d9c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessor.java @@ -25,11 +25,11 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigVersion; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfigUpdate; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfig; import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; @@ -170,7 +170,7 @@ private InferenceProcessor( this.client = ExceptionsHelper.requireNonNull(client, "client"); this.auditor = ExceptionsHelper.requireNonNull(auditor, "auditor"); this.modelId = ExceptionsHelper.requireNonNull(modelId, MODEL_ID); - this.inferenceConfig = ExceptionsHelper.requireNonNull(inferenceConfig, INFERENCE_CONFIG); + this.inferenceConfig = inferenceConfig; this.ignoreMissing = ignoreMissing; if (configuredWithInputsFields) { @@ -191,7 +191,7 @@ public String getModelId() { @Override public void execute(IngestDocument ingestDocument, BiConsumer handler) { - InferModelAction.Request request; + CoordinatedInferenceAction.Request request; try { request = buildRequest(ingestDocument); } catch (ElasticsearchStatusException e) { @@ -202,7 +202,7 @@ public void execute(IngestDocument ingestDocument, BiConsumer handleResponse(r, ingestDocument, handler), e -> handler.accept(ingestDocument, e)) ); @@ -223,7 +223,7 @@ void handleResponse(InferModelAction.Response response, IngestDocument ingestDoc } } - InferModelAction.Request buildRequest(IngestDocument ingestDocument) { + CoordinatedInferenceAction.Request buildRequest(IngestDocument ingestDocument) { if (configuredWithInputsFields) { // ignore missing only applies when using an input field list List requestInputs = new ArrayList<>(); @@ -246,10 +246,10 @@ InferModelAction.Request buildRequest(IngestDocument ingestDocument) { } } } - var request = InferModelAction.Request.forTextInput( + var request = CoordinatedInferenceAction.Request.forTextInput( modelId, - inferenceConfig, requestInputs, + inferenceConfig, previouslyLicensed, InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST ); @@ -263,12 +263,13 @@ InferModelAction.Request buildRequest(IngestDocument ingestDocument) { } LocalModel.mapFieldsIfNecessary(fields, fieldMap); - var request = 
InferModelAction.Request.forIngestDocs( + var request = CoordinatedInferenceAction.Request.forMapInput( modelId, List.of(fields), inferenceConfig, previouslyLicensed, - InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST + InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, + CoordinatedInferenceAction.Request.RequestModelType.UNKNOWN ); request.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); return request; @@ -409,15 +410,9 @@ public InferenceProcessor create( String modelId = ConfigurationUtils.readStringProperty(TYPE, tag, config, MODEL_ID); - InferenceConfigUpdate inferenceConfigUpdate; + InferenceConfigUpdate inferenceConfigUpdate = null; Map inferenceConfigMap = ConfigurationUtils.readOptionalMap(TYPE, tag, config, INFERENCE_CONFIG); - if (inferenceConfigMap == null) { - if (minNodeVersion.before(EmptyConfigUpdate.minimumSupportedVersion())) { - // an inference config is required when the empty update is not supported - throw newConfigurationException(TYPE, tag, INFERENCE_CONFIG, "required property is missing"); - } - inferenceConfigUpdate = new EmptyConfigUpdate(); - } else { + if (inferenceConfigMap != null) { inferenceConfigUpdate = inferenceConfigUpdateFromMap(inferenceConfigMap); } @@ -445,7 +440,7 @@ public InferenceProcessor create( ); } - if (inferenceConfigUpdate.getResultsField() != null) { + if (inferenceConfigUpdate != null && inferenceConfigUpdate.getResultsField() != null) { throw newConfigurationException( TYPE, tag, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index 7cdeeb3d559ec..12019e93ba713 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.ParseField; 
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; @@ -124,10 +125,10 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws return weightedTokensToQuery(fieldName, weightedTokensSupplier.get(), queryRewriteContext); } - InferModelAction.Request inferRequest = InferModelAction.Request.forTextInput( + CoordinatedInferenceAction.Request inferRequest = CoordinatedInferenceAction.Request.forTextInput( modelId, - TextExpansionConfigUpdate.EMPTY_UPDATE, List.of(modelText), + TextExpansionConfigUpdate.EMPTY_UPDATE, false, InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API ); @@ -136,32 +137,38 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws SetOnce textExpansionResultsSupplier = new SetOnce<>(); queryRewriteContext.registerAsyncAction((client, listener) -> { - executeAsyncWithOrigin(client, ML_ORIGIN, InferModelAction.INSTANCE, inferRequest, ActionListener.wrap(inferenceResponse -> { + executeAsyncWithOrigin( + client, + ML_ORIGIN, + CoordinatedInferenceAction.INSTANCE, + inferRequest, + ActionListener.wrap(inferenceResponse -> { - if (inferenceResponse.getInferenceResults().isEmpty()) { - listener.onFailure(new IllegalStateException("inference response contain no results")); - return; - } + if (inferenceResponse.getInferenceResults().isEmpty()) { + listener.onFailure(new IllegalStateException("inference response contain no results")); + return; + } - if (inferenceResponse.getInferenceResults().get(0) instanceof TextExpansionResults textExpansionResults) { - textExpansionResultsSupplier.set(textExpansionResults); - listener.onResponse(null); - } else if 
(inferenceResponse.getInferenceResults().get(0) instanceof WarningInferenceResults warning) { - listener.onFailure(new IllegalStateException(warning.getWarning())); - } else { - listener.onFailure( - new IllegalStateException( - "expected a result of type [" - + TextExpansionResults.NAME - + "] received [" - + inferenceResponse.getInferenceResults().get(0).getWriteableName() - + "]. Is [" - + modelId - + "] a compatible model?" - ) - ); - } - }, listener::onFailure)); + if (inferenceResponse.getInferenceResults().get(0) instanceof TextExpansionResults textExpansionResults) { + textExpansionResultsSupplier.set(textExpansionResults); + listener.onResponse(null); + } else if (inferenceResponse.getInferenceResults().get(0) instanceof WarningInferenceResults warning) { + listener.onFailure(new IllegalStateException(warning.getWarning())); + } else { + listener.onFailure( + new IllegalStateException( + "expected a result of type [" + + TextExpansionResults.NAME + + "] received [" + + inferenceResponse.getInferenceResults().get(0).getWriteableName() + + "]. Is [" + + modelId + + "] a compatible model?" 
+ ) + ); + } + }, listener::onFailure) + ); }); return new TextExpansionQueryBuilder(this, textExpansionResultsSupplier); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java index 72663b3f8a7bd..bd0916065ec5f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilder.java @@ -18,6 +18,7 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; @@ -93,17 +94,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void buildVector(Client client, ActionListener listener) { - InferModelAction.Request inferRequest = InferModelAction.Request.forTextInput( + CoordinatedInferenceAction.Request inferRequest = CoordinatedInferenceAction.Request.forTextInput( modelId, - TextEmbeddingConfigUpdate.EMPTY_INSTANCE, List.of(modelText), + TextEmbeddingConfigUpdate.EMPTY_INSTANCE, false, InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API ); inferRequest.setHighPriority(true); inferRequest.setPrefixType(TrainedModelPrefixStrings.PrefixType.SEARCH); - executeAsyncWithOrigin(client, ML_ORIGIN, InferModelAction.INSTANCE, inferRequest, ActionListener.wrap(response -> { + executeAsyncWithOrigin(client, ML_ORIGIN, CoordinatedInferenceAction.INSTANCE, inferRequest, ActionListener.wrap(response -> { if (response.getInferenceResults().isEmpty()) { 
listener.onFailure(new IllegalStateException("text embedding inference response contain no results")); return; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index 5c98ac53c7228..0698c266400b0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -313,37 +313,6 @@ public void testCreateProcessorWithTooOldMinNodeVersionNlp() throws IOException }); } - public void testCreateProcessorWithEmptyConfigNotSupportedOnOldNode() throws IOException { - Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); - - includeNodeInfoValues.forEach(includeNodeInfo -> { - InferenceProcessor.Factory processorFactory = new InferenceProcessor.Factory( - client, - clusterService, - Settings.EMPTY, - includeNodeInfo - ); - try { - processorFactory.accept(builderClusterStateWithModelReferences(MlConfigVersion.V_7_5_0, "model1")); - } catch (IOException ioe) { - throw new AssertionError(ioe.getMessage()); - } - - Map minimalConfig = new HashMap<>() { - { - put(InferenceProcessor.MODEL_ID, "my_model"); - put(InferenceProcessor.TARGET_FIELD, "result"); - } - }; - - ElasticsearchException ex = expectThrows( - ElasticsearchException.class, - () -> processorFactory.create(Collections.emptyMap(), "my_inference_processor", null, minimalConfig) - ); - assertThat(ex.getMessage(), equalTo("[inference_config] required property is missing")); - }); - } - public void testCreateProcessor() { Set includeNodeInfoValues = new HashSet<>(Arrays.asList(true, false)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java index 4821efa29631f..6feb014309fe9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorTests.java @@ -420,7 +420,7 @@ public void testHandleResponseLicenseChanged() { IngestDocument document = TestIngestDocument.emptyIngestDocument(); - assertThat(inferenceProcessor.buildRequest(document).isPreviouslyLicensed(), is(false)); + assertThat(inferenceProcessor.buildRequest(document).getPreviouslyLicensed(), is(false)); InferModelAction.Response response = new InferModelAction.Response( Collections.singletonList(new RegressionInferenceResults(0.7, RegressionConfig.EMPTY_PARAMS)), @@ -432,7 +432,7 @@ public void testHandleResponseLicenseChanged() { assertThat(ex, is(nullValue())); }); - assertThat(inferenceProcessor.buildRequest(document).isPreviouslyLicensed(), is(true)); + assertThat(inferenceProcessor.buildRequest(document).getPreviouslyLicensed(), is(true)); response = new InferModelAction.Response( Collections.singletonList(new RegressionInferenceResults(0.7, RegressionConfig.EMPTY_PARAMS)), @@ -445,7 +445,7 @@ public void testHandleResponseLicenseChanged() { assertThat(ex, is(nullValue())); }); - assertThat(inferenceProcessor.buildRequest(document).isPreviouslyLicensed(), is(true)); + assertThat(inferenceProcessor.buildRequest(document).getPreviouslyLicensed(), is(true)); inferenceProcessor.handleResponse(response, document, (doc, ex) -> { assertThat(doc, is(not(nullValue()))); @@ -608,8 +608,8 @@ public void testBuildRequestWithInputFields() { document.setFieldValue("unrelated", "text"); var request = inferenceProcessor.buildRequest(document); - assertTrue(request.getObjectsToInfer().isEmpty()); - var requestInputs = request.getTextInput(); + assertNull(request.getObjectsToInfer()); + var 
requestInputs = request.getInputs(); assertThat(requestInputs, contains("body_text", "title_text")); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_INGEST, request.getInferenceTimeout()); assertEquals(TrainedModelPrefixStrings.PrefixType.INGEST, request.getPrefixType()); @@ -683,7 +683,7 @@ public void testBuildRequestWithInputFields_MissingField() { document.setFieldValue("unrelated", 1.0); var request = inferenceProcessor.buildRequest(document); - var requestInputs = request.getTextInput(); + var requestInputs = request.getInputs(); assertThat(requestInputs, contains("body_text", "")); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index d8edea137330f..5e414a7f997d5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; @@ -72,14 +73,15 @@ public void testMustRewrite() { @Override protected boolean canSimulateMethod(Method method, Object[] args) throws NoSuchMethodException { return method.equals(Client.class.getMethod("execute", ActionType.class, ActionRequest.class, ActionListener.class)) - && (args[0] instanceof InferModelAction); + && (args[0] instanceof CoordinatedInferenceAction); } @Override protected Object simulateMethod(Method method, Object[] 
args) { - InferModelAction.Request request = (InferModelAction.Request) args[1]; + CoordinatedInferenceAction.Request request = (CoordinatedInferenceAction.Request) args[1]; assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API, request.getInferenceTimeout()); assertEquals(TrainedModelPrefixStrings.PrefixType.SEARCH, request.getPrefixType()); + assertEquals(CoordinatedInferenceAction.Request.RequestModelType.NLP_MODEL, request.getRequestModelType()); // Randomisation cannot be used here as {@code #doAssertLuceneQuery} // asserts that 2 rewritten queries are the same @@ -89,7 +91,7 @@ protected Object simulateMethod(Method method, Object[] args) { } var response = InferModelAction.Response.builder() - .setId(request.getId()) + .setId(request.getModelId()) .addInferenceResults(List.of(new TextExpansionResults("foo", tokens, randomBoolean()))) .build(); @SuppressWarnings("unchecked") // We matched the method above. diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java index 8506be491f7e1..a44aa9404f4f9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.test.AbstractQueryVectorBuilderTestCase; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; @@ -34,13 +35,14 @@ protected List additionalPlugins() { @Override protected void 
doAssertClientRequest(ActionRequest request, TextEmbeddingQueryVectorBuilder builder) { - assertThat(request, instanceOf(InferModelAction.Request.class)); - InferModelAction.Request inferRequest = (InferModelAction.Request) request; - assertThat(inferRequest.getTextInput(), hasSize(1)); - assertEquals(builder.getModelText(), inferRequest.getTextInput().get(0)); - assertEquals(builder.getModelId(), inferRequest.getId()); + assertThat(request, instanceOf(CoordinatedInferenceAction.Request.class)); + CoordinatedInferenceAction.Request inferRequest = (CoordinatedInferenceAction.Request) request; + assertThat(inferRequest.getInputs(), hasSize(1)); + assertEquals(builder.getModelText(), inferRequest.getInputs().get(0)); + assertEquals(builder.getModelId(), inferRequest.getModelId()); assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API, inferRequest.getInferenceTimeout()); assertEquals(TrainedModelPrefixStrings.PrefixType.SEARCH, inferRequest.getPrefixType()); + assertEquals(CoordinatedInferenceAction.Request.RequestModelType.NLP_MODEL, inferRequest.getRequestModelType()); } public ActionResponse createResponse(float[] array, TextEmbeddingQueryVectorBuilder builder) { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 11edc66977e6c..0164ee8f6122e 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -280,6 +280,7 @@ public class Constants { "cluster:admin/xpack/watcher/settings/update", "cluster:admin/xpack/watcher/watch/put", "cluster:internal/remote_cluster/nodes", + 
"cluster:internal/xpack/ml/coordinatedinference", "cluster:internal/xpack/ml/datafeed/isolate", "cluster:internal/xpack/ml/datafeed/running_state", "cluster:internal/xpack/ml/inference/infer", From 5a44968d56f54cf06db5e1474539abf19d28bb2f Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 1 Dec 2023 09:19:13 +0000 Subject: [PATCH 125/263] Remove RerouteService from ClusterService (#102818) RerouteService was only being hidden inside ClusterService for accessibility, it doesn't actually need to be there. So pull it out as a first-class service and change users to get it directly instead --- .../elasticsearch/action/ActionModule.java | 4 +- .../TransportUpdateDesiredNodesAction.java | 7 +++- .../TransportClusterUpdateSettingsAction.java | 6 ++- .../routing/allocation/AllocationService.java | 7 +--- .../cluster/service/ClusterService.java | 37 ------------------- .../elasticsearch/node/NodeConstruction.java | 18 ++++----- .../org/elasticsearch/plugins/Plugin.java | 6 +++ .../service/ReservedClusterStateService.java | 9 ++++- .../snapshots/SnapshotsService.java | 7 +++- .../action/ActionModuleTests.java | 7 +++- ...ransportUpdateDesiredNodesActionTests.java | 3 ++ .../ClusterUpdateSettingsRequestTests.java | 2 + .../AbstractHttpServerTransportTests.java | 1 + .../service/FileSettingsServiceTests.java | 10 +++-- .../ReservedClusterStateServiceTests.java | 21 +++++++---- .../snapshots/SnapshotResiliencyTests.java | 2 +- .../TransportMigrateToDataTiersAction.java | 31 +++++++++------- .../ReservedLifecycleStateServiceTests.java | 4 ++ .../xpack/lucene/bwc/OldLuceneVersions.java | 6 +-- .../SearchableSnapshots.java | 6 +-- .../xpack/security/SecurityTests.java | 3 +- .../TransportDeleteShutdownNodeAction.java | 6 ++- .../TransportPutShutdownNodeAction.java | 6 ++- ...ransportDeleteShutdownNodeActionTests.java | 3 ++ .../TransportPutShutdownNodeActionTests.java | 3 ++ ...vedSnapshotLifecycleStateServiceTests.java | 2 + 26 files changed, 119 insertions(+), 98 
deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index e0f01405bcf0f..8e008dc57c81b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -266,6 +266,7 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.inject.AbstractModule; @@ -516,6 +517,7 @@ public ActionModule( SystemIndices systemIndices, Tracer tracer, ClusterService clusterService, + RerouteService rerouteService, List> reservedStateHandlers, RestExtension restExtension ) { @@ -562,7 +564,7 @@ public ActionModule( } else { restController = new RestController(restInterceptor, nodeClient, circuitBreakerService, usageService, tracer); } - reservedClusterStateService = new ReservedClusterStateService(clusterService, reservedStateHandlers); + reservedClusterStateService = new ReservedClusterStateService(clusterService, rerouteService, reservedStateHandlers); this.restExtension = restExtension; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index e3373ded94dc7..9f4c42a810563 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -45,6 +45,7 @@ public class TransportUpdateDesiredNodesAction extends 
TransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportUpdateDesiredNodesAction.class); + private final RerouteService rerouteService; private final FeatureService featureService; private final Consumer> desiredNodesValidator; private final MasterServiceTaskQueue taskQueue; @@ -53,6 +54,7 @@ public class TransportUpdateDesiredNodesAction extends TransportMasterNodeAction public TransportUpdateDesiredNodesAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, @@ -62,6 +64,7 @@ public TransportUpdateDesiredNodesAction( this( transportService, clusterService, + rerouteService, featureService, threadPool, actionFilters, @@ -74,6 +77,7 @@ public TransportUpdateDesiredNodesAction( TransportUpdateDesiredNodesAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, FeatureService featureService, ThreadPool threadPool, ActionFilters actionFilters, @@ -93,12 +97,13 @@ public TransportUpdateDesiredNodesAction( UpdateDesiredNodesResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.rerouteService = rerouteService; this.featureService = featureService; this.desiredNodesValidator = desiredNodesValidator; this.taskQueue = clusterService.createTaskQueue( "update-desired-nodes", Priority.URGENT, - new UpdateDesiredNodesExecutor(clusterService.getRerouteService(), allocationService) + new UpdateDesiredNodesExecutor(rerouteService, allocationService) ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java index da44265f87436..e4093486da39c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; @@ -48,12 +49,14 @@ public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAct private static final Logger logger = LogManager.getLogger(TransportClusterUpdateSettingsAction.class); + private final RerouteService rerouteService; private final ClusterSettings clusterSettings; @Inject public TransportClusterUpdateSettingsAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -71,6 +74,7 @@ public TransportClusterUpdateSettingsAction( ClusterUpdateSettingsResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.rerouteService = rerouteService; this.clusterSettings = clusterSettings; } @@ -191,7 +195,7 @@ private void reroute(final boolean updateSettingsAcked) { // the components (e.g. FilterAllocationDecider), so the changes made by the first call aren't visible to the components // until the ClusterStateListener instances have been invoked, but are visible after the first update task has been // completed. 
- clusterService.getRerouteService().reroute(REROUTE_TASK_SOURCE, Priority.URGENT, new ActionListener<>() { + rerouteService.reroute(REROUTE_TASK_SOURCE, Priority.URGENT, new ActionListener<>() { @Override public void onResponse(Void ignored) { listener.onResponse( diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java index 32c99f5baba85..3ad5e7fa43fe1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationService.java @@ -35,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.logging.ESLogMessage; @@ -382,8 +381,7 @@ public CommandsResult reroute( * state. Should be called after every change to the cluster that affects the routing table and/or the balance of shards. *

    * This method is expensive in larger clusters. Wherever possible you should invoke this method asynchronously using - * {@link RerouteService#reroute} to batch up invocations rather than calling the method directly. The node's reroute service is - * typically obtained from {@link ClusterService#getRerouteService}. + * {@link RerouteService#reroute} to batch up invocations rather than calling the method directly. * * @return an updated cluster state, or the same instance that was passed as an argument if no changes were made. */ @@ -400,8 +398,7 @@ public ClusterState reroute(ClusterState clusterState, String reason, ActionList * state. Should be called after every change to the cluster that affects the routing table and/or the balance of shards. *

    * This method is expensive in larger clusters. Wherever possible you should invoke this method asynchronously using - * {@link RerouteService#reroute} to batch up invocations rather than calling the method directly. The node's reroute service is - * typically obtained from {@link ClusterService#getRerouteService}. + * {@link RerouteService#reroute} to batch up invocations rather than calling the method directly. * * @return an updated cluster state, or the same instance that was passed as an argument if no changes were made. */ diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 67b6d64775dff..5c14b2ee1cbdf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.OperationRouting; -import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.ClusterSettings; @@ -31,8 +30,6 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; -import java.util.function.Supplier; - public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; @@ -56,24 +53,11 @@ public class ClusterService extends AbstractLifecycleComponent { private final String nodeName; - private final Supplier rerouteService; - public ClusterService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, TaskManager taskManager) { - this(settings, clusterSettings, threadPool, taskManager, () -> { throw new IllegalStateException("RerouteService not provided"); }); - 
} - - public ClusterService( - Settings settings, - ClusterSettings clusterSettings, - ThreadPool threadPool, - TaskManager taskManager, - Supplier rerouteService - ) { this( settings, clusterSettings, new MasterService(settings, clusterSettings, threadPool, taskManager), - rerouteService, new ClusterApplierService(Node.NODE_NAME_SETTING.get(settings), settings, clusterSettings, threadPool) ); } @@ -83,27 +67,10 @@ public ClusterService( ClusterSettings clusterSettings, MasterService masterService, ClusterApplierService clusterApplierService - ) { - this( - settings, - clusterSettings, - masterService, - () -> { throw new IllegalStateException("RerouteService not provided"); }, - clusterApplierService - ); - } - - public ClusterService( - Settings settings, - ClusterSettings clusterSettings, - MasterService masterService, - Supplier rerouteService, - ClusterApplierService clusterApplierService ) { this.settings = settings; this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.masterService = masterService; - this.rerouteService = rerouteService; this.operationRouting = new OperationRouting(settings, clusterSettings); this.clusterSettings = clusterSettings; this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); @@ -120,10 +87,6 @@ public synchronized void setNodeConnectionsService(NodeConnectionsService nodeCo clusterApplierService.setNodeConnectionsService(nodeConnectionsService); } - public RerouteService getRerouteService() { - return rerouteService.get(); - } - @Override protected synchronized void doStart() { clusterApplierService.start(); diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 7a0fa7a821732..19a1310ed86aa 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -597,8 +597,7 @@ private void construct( telemetryProvider.getTracer() ); - 
final SetOnce rerouteServiceReference = new SetOnce<>(); - ClusterService clusterService = createClusterService(settingsModule, threadPool, taskManager, rerouteServiceReference::get); + ClusterService clusterService = createClusterService(settingsModule, threadPool, taskManager); clusterService.addStateApplier(scriptService); Supplier documentParsingObserverSupplier = getDocumentParsingObserverSupplier(); @@ -618,6 +617,7 @@ private void construct( SystemIndices systemIndices = createSystemIndices(settings); final SetOnce repositoriesServiceReference = new SetOnce<>(); + final SetOnce rerouteServiceReference = new SetOnce<>(); final ClusterInfoService clusterInfoService = serviceProvider.newClusterInfoService( pluginsService, settings, @@ -749,6 +749,7 @@ private void construct( record PluginServiceInstances( Client client, ClusterService clusterService, + RerouteService rerouteService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, @@ -767,6 +768,7 @@ record PluginServiceInstances( PluginServiceInstances pluginServices = new PluginServiceInstances( client, clusterService, + rerouteService, threadPool, createResourceWatcherService(settings, threadPool), scriptService, @@ -804,6 +806,7 @@ record PluginServiceInstances( systemIndices, telemetryProvider.getTracer(), clusterService, + rerouteService, buildReservedStateHandlers( settingsModule, clusterService, @@ -890,6 +893,7 @@ record PluginServiceInstances( SnapshotsService snapshotsService = new SnapshotsService( settings, clusterService, + rerouteService, clusterModule.getIndexNameExpressionResolver(), repositoryService, transportService, @@ -1064,18 +1068,12 @@ record PluginServiceInstances( postInjection(clusterModule, actionModule, clusterService, transportService, featureService); } - private ClusterService createClusterService( - SettingsModule settingsModule, - ThreadPool threadPool, - TaskManager taskManager, - Supplier rerouteService - ) { + private 
ClusterService createClusterService(SettingsModule settingsModule, ThreadPool threadPool, TaskManager taskManager) { ClusterService clusterService = new ClusterService( settingsModule.getSettings(), settingsModule.getClusterSettings(), threadPool, - taskManager, - rerouteService + taskManager ); resourcesToClose.add(clusterService); diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index de9f8186865aa..12ad05e2bc710 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.LifecycleComponent; @@ -78,6 +79,11 @@ public interface PluginServices { */ ClusterService clusterService(); + /** + * A service to reroute shards to other nodes + */ + RerouteService rerouteService(); + /** * A service to allow retrieving an executor to run an async action */ diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index f6d5ab3ead6af..76c2007dc8d8e 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; 
+import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; @@ -83,12 +84,16 @@ public class ReservedClusterStateService { * @param clusterService for fetching and saving the modified state * @param handlerList a list of reserved state handlers, which we use to transform the state */ - public ReservedClusterStateService(ClusterService clusterService, List> handlerList) { + public ReservedClusterStateService( + ClusterService clusterService, + RerouteService rerouteService, + List> handlerList + ) { this.clusterService = clusterService; this.updateTaskQueue = clusterService.createTaskQueue( "reserved state update", Priority.URGENT, - new ReservedStateUpdateTaskExecutor(clusterService.getRerouteService()) + new ReservedStateUpdateTaskExecutor(rerouteService) ); this.errorTaskQueue = clusterService.createTaskQueue("reserved state error", Priority.URGENT, new ReservedStateErrorTaskExecutor()); this.handlers = handlerList.stream().collect(Collectors.toMap(ReservedClusterStateHandler::name, Function.identity())); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 499ac7022403e..3b872f550fc6f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -51,6 +51,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -153,6 +154,8 @@ public final class 
SnapshotsService extends AbstractLifecycleComponent implement private final ClusterService clusterService; + private final RerouteService rerouteService; + private final IndexNameExpressionResolver indexNameExpressionResolver; private final RepositoriesService repositoriesService; @@ -203,6 +206,7 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement public SnapshotsService( Settings settings, ClusterService clusterService, + RerouteService rerouteService, IndexNameExpressionResolver indexNameExpressionResolver, RepositoriesService repositoriesService, TransportService transportService, @@ -210,6 +214,7 @@ public SnapshotsService( SystemIndices systemIndices ) { this.clusterService = clusterService; + this.rerouteService = rerouteService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.repositoriesService = repositoriesService; this.threadPool = transportService.getThreadPool(); @@ -3712,7 +3717,7 @@ public ClusterState execute(BatchExecutionContext batchExecutionCo final ClusterState state = batchExecutionContext.initialState(); final SnapshotShardsUpdateContext shardsUpdateContext = new SnapshotShardsUpdateContext( batchExecutionContext, - () -> clusterService.getRerouteService().reroute("after shards snapshot update", Priority.NORMAL, ActionListener.noop()) + () -> rerouteService.reroute("after shards snapshot update", Priority.NORMAL, ActionListener.noop()) ); final SnapshotsInProgress initialSnapshots = SnapshotsInProgress.get(state); SnapshotsInProgress snapshotsInProgress = shardsUpdateContext.computeUpdatedState(); diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 4f72357f83325..a076537bb7351 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -121,6 +121,7 @@ public void 
testSetupRestHandlerContainsKnownBuiltin() { null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ); @@ -181,6 +182,7 @@ public String getName() { null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ); @@ -234,6 +236,7 @@ public List getRestHandlers( null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ); @@ -282,6 +285,7 @@ public void test3rdPartyHandlerIsNotInstalled() { null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ) @@ -314,13 +318,14 @@ public void test3rdPartyRestControllerIsNotInstalled() { settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), threadPool, - Arrays.asList(secPlugin), + List.of(secPlugin), null, null, usageService, null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java index 4e2948eafc1d7..00f46d8c42bf0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesActionTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.DesiredNodesTestCase; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.features.FeatureService; @@ -50,6 +51,7 @@ public void testWriteBlocks() { final TransportUpdateDesiredNodesAction action = new TransportUpdateDesiredNodesAction( 
transportService, mock(ClusterService.class), + mock(RerouteService.class), mock(FeatureService.class), threadPool, mock(ActionFilters.class), @@ -78,6 +80,7 @@ public void testNoBlocks() { final TransportUpdateDesiredNodesAction action = new TransportUpdateDesiredNodesAction( transportService, mock(ClusterService.class), + mock(RerouteService.class), mock(FeatureService.class), threadPool, mock(ActionFilters.class), diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index 6373b94ffb94a..a1d2ef33d85f3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.ClusterSettings; @@ -91,6 +92,7 @@ public void testOperatorHandler() throws IOException { TransportClusterUpdateSettingsAction action = new TransportClusterUpdateSettingsAction( transportService, mock(ClusterService.class), + mock(RerouteService.class), threadPool, mock(ActionFilters.class), mock(IndexNameExpressionResolver.class), diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 86c48c1e183ea..559d3fce9cebf 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ 
b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -1153,6 +1153,7 @@ public Collection getRestHeaders() { null, null, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 96b4df3b856b7..5968be34e985a 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -67,14 +67,12 @@ public void setUp() throws Exception { threadpool = new TestThreadPool("file_settings_service_tests"); - var reroute = mock(RerouteService.class); clusterService = spy( new ClusterService( Settings.builder().put(NODE_NAME_SETTING.getKey(), "test").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadpool, - new TaskManager(Settings.EMPTY, threadpool, Set.of()), - () -> reroute + new TaskManager(Settings.EMPTY, threadpool, Set.of()) ) ); @@ -101,7 +99,11 @@ public void setUp() throws Exception { ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - controller = new ReservedClusterStateService(clusterService, List.of(new ReservedClusterSettingsAction(clusterSettings))); + controller = new ReservedClusterStateService( + clusterService, + mock(RerouteService.class), + List.of(new ReservedClusterSettingsAction(clusterSettings)) + ); fileSettingsService = spy(new FileSettingsService(clusterService, controller, env)); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java index e8f5a71ad6fcb..fe9401284b9f5 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedClusterStateServiceTests.java @@ -85,6 +85,7 @@ public void testOperatorController() throws IOException { ReservedClusterStateService controller = new ReservedClusterStateService( clusterService, + mock(RerouteService.class), List.of(new ReservedClusterSettingsAction(clusterSettings)) ); @@ -147,10 +148,9 @@ public void testUpdateStateTasks() throws Exception { ClusterService clusterService = mock(ClusterService.class); RerouteService rerouteService = mock(RerouteService.class); - when(clusterService.getRerouteService()).thenReturn(rerouteService); ClusterState state = ClusterState.builder(new ClusterName("test")).build(); - ReservedStateUpdateTaskExecutor taskExecutor = new ReservedStateUpdateTaskExecutor(clusterService.getRerouteService()); + ReservedStateUpdateTaskExecutor taskExecutor = new ReservedStateUpdateTaskExecutor(rerouteService); AtomicBoolean successCalled = new AtomicBoolean(false); @@ -362,7 +362,9 @@ public void onFailure(Exception e) {} ); ClusterService clusterService = mock(ClusterService.class); - final var controller = spy(new ReservedClusterStateService(clusterService, List.of(newStateMaker, exceptionThrower))); + final var controller = spy( + new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of(newStateMaker, exceptionThrower)) + ); var trialRunResult = controller.trialRun("namespace_one", state, chunk, new LinkedHashSet<>(orderedHandlers)); assertEquals(0, trialRunResult.nonStateTransforms().size()); @@ -440,7 +442,7 @@ public void testHandlerOrdering() { ReservedClusterStateHandler> oh3 = makeHandlerHelper("three", List.of("two")); ClusterService clusterService = mock(ClusterService.class); - final var controller = new ReservedClusterStateService(clusterService, List.of(oh1, oh2, oh3)); + final var controller = new 
ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of(oh1, oh2, oh3)); Collection ordered = controller.orderedStateHandlers(Set.of("one", "two", "three")); assertThat(ordered, contains("two", "three", "one")); @@ -460,7 +462,7 @@ public void testHandlerOrdering() { // Change the second handler so that we create cycle oh2 = makeHandlerHelper("two", List.of("one")); - final var controller1 = new ReservedClusterStateService(clusterService, List.of(oh1, oh2)); + final var controller1 = new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of(oh1, oh2)); assertThat( expectThrows(IllegalStateException.class, () -> controller1.orderedStateHandlers(Set.of("one", "two"))).getMessage(), @@ -484,6 +486,7 @@ public void testDuplicateHandlerNames() { IllegalStateException.class, () -> new ReservedClusterStateService( clusterService, + mock(RerouteService.class), List.of(new ReservedClusterSettingsAction(clusterSettings), new TestHandler()) ) ).getMessage().startsWith("Duplicate key cluster_settings") @@ -496,7 +499,7 @@ public void testCheckAndReportError() { when(clusterService.state()).thenReturn(state); when(clusterService.createTaskQueue(any(), any(), any())).thenReturn(mockTaskQueue()); - final var controller = spy(new ReservedClusterStateService(clusterService, List.of())); + final var controller = spy(new ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of())); assertNull(controller.checkAndReportError("test", List.of(), null)); verify(controller, times(0)).updateErrorState(any()); @@ -568,7 +571,9 @@ public Map fromXContent(XContentParser parser) throws IOExceptio var orderedHandlers = List.of(exceptionThrower.name(), newStateMaker.name()); ClusterService clusterService = mock(ClusterService.class); - final var controller = spy(new ReservedClusterStateService(clusterService, List.of(newStateMaker, exceptionThrower))); + final var controller = spy( + new 
ReservedClusterStateService(clusterService, mock(RerouteService.class), List.of(newStateMaker, exceptionThrower)) + ); var trialRunResult = controller.trialRun("namespace_one", state, chunk, new LinkedHashSet<>(orderedHandlers)); @@ -631,7 +636,7 @@ public Map fromXContent(XContentParser parser) throws IOExceptio var chunk = new ReservedStateChunk(chunkMap, new ReservedStateVersion(2L, Version.CURRENT)); ClusterService clusterService = mock(ClusterService.class); - final var controller = spy(new ReservedClusterStateService(clusterService, handlers)); + final var controller = spy(new ReservedClusterStateService(clusterService, mock(RerouteService.class), handlers)); var trialRunResult = controller.trialRun( "namespace_one", diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 26f91d360db83..5b59040bbb04d 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1612,7 +1612,6 @@ private final class TestClusterNode { settings, clusterSettings, masterService, - () -> (reason, priority, listener) -> listener.onResponse(null), new ClusterApplierService(node.getName(), settings, clusterSettings, threadPool) { @Override protected PrioritizedEsThreadPoolExecutor createThreadPoolExecutor() { @@ -1722,6 +1721,7 @@ protected void assertSnapshotOrGenericThread() { snapshotsService = new SnapshotsService( settings, clusterService, + (reason, priority, listener) -> listener.onResponse(null), indexNameExpressionResolver, repositoriesService, transportService, diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMigrateToDataTiersAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMigrateToDataTiersAction.java index ae0df89c9bb8f..8cc14a42eb5f3 100644 --- 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMigrateToDataTiersAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMigrateToDataTiersAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; @@ -46,6 +47,7 @@ public class TransportMigrateToDataTiersAction extends TransportMasterNodeAction private static final Logger logger = LogManager.getLogger(TransportMigrateToDataTiersAction.class); + private final RerouteService rerouteService; private final NamedXContentRegistry xContentRegistry; private final Client client; private final XPackLicenseState licenseState; @@ -54,6 +56,7 @@ public class TransportMigrateToDataTiersAction extends TransportMasterNodeAction public TransportMigrateToDataTiersAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -72,6 +75,7 @@ public TransportMigrateToDataTiersAction( MigrateToDataTiersResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.rerouteService = rerouteService; this.xContentRegistry = xContentRegistry; this.client = client; this.licenseState = licenseState; @@ -141,20 +145,19 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - clusterService.getRerouteService() - .reroute("cluster migrated to data tiers routing", Priority.NORMAL, new ActionListener() { - @Override - public void onResponse(Void ignored) {} - - @Override - public void 
onFailure(Exception e) { - logger.log( - MasterService.isPublishFailureException(e) ? Level.DEBUG : Level.WARN, - "unsuccessful reroute after migration to data tiers routing", - e - ); - } - }); + rerouteService.reroute("cluster migrated to data tiers routing", Priority.NORMAL, new ActionListener() { + @Override + public void onResponse(Void ignored) {} + + @Override + public void onFailure(Exception e) { + logger.log( + MasterService.isPublishFailureException(e) ? Level.DEBUG : Level.WARN, + "unsuccessful reroute after migration to data tiers routing", + e + ); + } + }); MigratedEntities entities = migratedEntities.get(); listener.onResponse( new MigrateToDataTiersResponse( diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java index e39463141c777..b37eb8f99f52c 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/ReservedLifecycleStateServiceTests.java @@ -299,6 +299,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ReservedClusterStateService controller = new ReservedClusterStateService( clusterService, + null, List.of(new ReservedClusterSettingsAction(clusterSettings)) ); @@ -371,6 +372,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { controller = new ReservedClusterStateService( clusterService, + null, List.of( new ReservedClusterSettingsAction(clusterSettings), new ReservedLifecycleAction(xContentRegistry(), client, licenseState) @@ -393,6 +395,7 @@ public void testOperatorControllerWithPluginPackage() { ReservedClusterStateService controller = new ReservedClusterStateService( clusterService, + null, List.of(new ReservedClusterSettingsAction(clusterSettings)) ); @@ -430,6 +433,7 @@ public void 
testOperatorControllerWithPluginPackage() { controller = new ReservedClusterStateService( clusterService, + null, List.of( new ReservedClusterSettingsAction(clusterSettings), new ReservedLifecycleAction(xContentRegistry(), client, licenseState) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 955cf0396326b..406ea50315de0 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -78,15 +77,14 @@ public class OldLuceneVersions extends Plugin implements IndexStorePlugin, Clust @Override public Collection createComponents(PluginServices services) { - ClusterService clusterService = services.clusterService(); ThreadPool threadPool = services.threadPool(); - this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), services.rerouteService())); if (DiscoveryNode.isMasterNode(services.environment().settings())) { // We periodically look through the indices and identify if there are any archive indices, // then marking the feature as used. We do this on each master node so that if one master fails, the // continue reporting usage state. 
- var usageTracker = new ArchiveUsageTracker(getLicenseState(), clusterService::state); + var usageTracker = new ArchiveUsageTracker(getLicenseState(), services.clusterService()::state); threadPool.scheduleWithFixedDelay(usageTracker, TimeValue.timeValueMinutes(15), threadPool.generic()); } return List.of(); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 45c38e52ad9c3..83a38a4d0b328 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -321,7 +321,7 @@ public Collection createComponents(PluginServices services) { final List components = new ArrayList<>(); this.repositoriesServiceSupplier = services.repositoriesServiceSupplier(); this.threadPool.set(threadPool); - this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), services.rerouteService())); if (DiscoveryNode.canContainData(settings)) { final CacheService cacheService = new CacheService(settings, clusterService, threadPool, new PersistentCache(nodeEnvironment)); this.cacheService.set(cacheService); @@ -357,12 +357,12 @@ public Collection createComponents(PluginServices services) { threadPool.scheduleWithFixedDelay(usageTracker, TimeValue.timeValueMinutes(15), threadPool.generic()); } - this.allocator.set(new SearchableSnapshotAllocator(client, clusterService.getRerouteService(), frozenCacheInfoService)); + this.allocator.set(new SearchableSnapshotAllocator(client, services.rerouteService(), frozenCacheInfoService)); components.add(new 
FrozenCacheServiceSupplier(frozenCacheService.get())); components.add(new CacheServiceSupplier(cacheService.get())); if (DiscoveryNode.isMasterNode(settings)) { new SearchableSnapshotIndexMetadataUpgrader(clusterService, threadPool).initialize(); - clusterService.addListener(new RepositoryUuidWatcher(clusterService.getRerouteService())); + clusterService.addListener(new RepositoryUuidWatcher(services.rerouteService())); } return Collections.unmodifiableList(components); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 3f29944631d42..6773da137ac96 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -772,13 +772,14 @@ public void testSecurityRestHandlerInterceptorCanBeInstalled() throws IllegalAcc settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), threadPool, - Arrays.asList(security), + List.of(security), null, null, usageService, null, Tracer.NOOP, mock(ClusterService.class), + null, List.of(), RestExtension.allowAll() ); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java index 3e0e578ded120..caf8ae0e3107b 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java @@ -40,6 +40,7 @@ public class TransportDeleteShutdownNodeAction extends AcknowledgedTransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportDeleteShutdownNodeAction.class); + private final 
RerouteService rerouteService; private final MasterServiceTaskQueue taskQueue; private static boolean deleteShutdownNodeState(Map shutdownMetadata, Request request) { @@ -89,8 +90,7 @@ public ClusterState execute(BatchExecutionContext batchE taskContext.onFailure(e); continue; } - var reroute = clusterService.getRerouteService(); - taskContext.success(() -> ackAndReroute(request, taskContext.getTask().listener(), reroute)); + taskContext.success(() -> ackAndReroute(request, taskContext.getTask().listener(), rerouteService)); } if (changed == false) { return batchExecutionContext.initialState(); @@ -108,6 +108,7 @@ public ClusterState execute(BatchExecutionContext batchE public TransportDeleteShutdownNodeAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver @@ -123,6 +124,7 @@ public TransportDeleteShutdownNodeAction( indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.rerouteService = rerouteService; taskQueue = clusterService.createTaskQueue("delete-node-shutdown", Priority.URGENT, new DeleteShutdownNodeExecutor()); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index 767c128030538..7946bb7e46627 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -43,6 +43,7 @@ public class TransportPutShutdownNodeAction extends AcknowledgedTransportMasterNodeAction { private static final Logger logger = LogManager.getLogger(TransportPutShutdownNodeAction.class); + private final RerouteService rerouteService; private final 
MasterServiceTaskQueue taskQueue; private final PutShutdownNodeExecutor executor = new PutShutdownNodeExecutor(); @@ -137,8 +138,7 @@ public ClusterState execute(BatchExecutionContext batchExec taskContext.onFailure(e); continue; } - var reroute = clusterService.getRerouteService(); - taskContext.success(() -> ackAndMaybeReroute(request, taskContext.getTask().listener(), reroute)); + taskContext.success(() -> ackAndMaybeReroute(request, taskContext.getTask().listener(), rerouteService)); } if (changed == false) { return batchExecutionContext.initialState(); @@ -156,6 +156,7 @@ public ClusterState execute(BatchExecutionContext batchExec public TransportPutShutdownNodeAction( TransportService transportService, ClusterService clusterService, + RerouteService rerouteService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver @@ -171,6 +172,7 @@ public TransportPutShutdownNodeAction( indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); + this.rerouteService = rerouteService; taskQueue = clusterService.createTaskQueue("put-shutdown", Priority.URGENT, new PutShutdownNodeExecutor()); } diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeActionTests.java index cf28bf9922b24..82b1427fc8e4f 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.routing.RerouteService; import 
org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.test.ESTestCase; @@ -57,6 +58,7 @@ public void init() { var threadPool = mock(ThreadPool.class); var transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); clusterService = mock(ClusterService.class); + var rerouteService = mock(RerouteService.class); var actionFilters = mock(ActionFilters.class); var indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); when(clusterService.createTaskQueue(any(), any(), Mockito.>any())).thenReturn( @@ -65,6 +67,7 @@ public void init() { action = new TransportDeleteShutdownNodeAction( transportService, clusterService, + rerouteService, threadPool, actionFilters, indexNameExpressionResolver diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java index cbd51ceebc729..1ea85f4ef07cf 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterStateTaskExecutor.TaskContext; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type; +import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.core.TimeValue; @@ -63,6 +64,7 @@ public void init() { var threadPool = mock(ThreadPool.class); var transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); clusterService = 
mock(ClusterService.class); + var rerouteService = mock(RerouteService.class); var actionFilters = mock(ActionFilters.class); var indexNameExpressionResolver = mock(IndexNameExpressionResolver.class); when(clusterService.createTaskQueue(any(), any(), Mockito.>any())).thenReturn( @@ -71,6 +73,7 @@ public void init() { action = new TransportPutShutdownNodeAction( transportService, clusterService, + rerouteService, threadPool, actionFilters, indexNameExpressionResolver diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java index 954b5ba024418..c2e3786a1afe7 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/action/ReservedSnapshotLifecycleStateServiceTests.java @@ -277,6 +277,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { ReservedClusterStateService controller = new ReservedClusterStateService( clusterService, + null, List.of(new ReservedClusterSettingsAction(clusterSettings), new ReservedRepositoryAction(repositoriesService)) ); @@ -347,6 +348,7 @@ public void testOperatorControllerFromJSONContent() throws IOException { controller = new ReservedClusterStateService( clusterService, + null, List.of( new ReservedClusterSettingsAction(clusterSettings), new ReservedSnapshotAction(), From c9f3a6c6c761cf4be7811b17552874d04b2becd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 1 Dec 2023 11:02:21 +0100 Subject: [PATCH 126/263] [LTR] Better handling of missing parameters. 
(#102601) --- .../mustache/CustomMustacheFactory.java | 59 +++++++++++++-- .../CustomReflectionObjectHandler.java | 36 ++++++++++ .../MustacheInvalidParameterException.java | 17 +++++ .../script/mustache/MustacheScriptEngine.java | 29 ++++++-- .../mustache/MustacheScriptEngineTests.java | 55 +++++++++++++- .../trainedmodel/LearnToRankConfig.java | 6 ++ x-pack/plugin/ml/build.gradle | 1 + .../ml/qa/basic-multi-node/build.gradle | 4 ++ x-pack/plugin/ml/qa/disabled/build.gradle | 7 +- .../qa/native-multi-node-tests/build.gradle | 2 +- .../ml/qa/single-node-tests/build.gradle | 4 ++ .../plugin/ml/src/main/java/module-info.java | 1 + .../ml/inference/ltr/LearnToRankService.java | 41 +++++++---- .../ltr/LearnToRankServiceTests.java | 71 +++++++++++++------ 14 files changed, 283 insertions(+), 50 deletions(-) create mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheInvalidParameterException.java diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 73669ccacdbc6..49ad8302605cf 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -47,6 +47,7 @@ public final class CustomMustacheFactory extends DefaultMustacheFactory { static final String X_WWW_FORM_URLENCODED_MEDIA_TYPE = "application/x-www-form-urlencoded"; private static final String DEFAULT_MEDIA_TYPE = JSON_MEDIA_TYPE; + private static final boolean DEFAULT_DETECT_MISSING_PARAMS = false; private static final Map> ENCODERS = Map.of( V7_JSON_MEDIA_TYPE_WITH_CHARSET, @@ -63,14 +64,30 @@ public final class CustomMustacheFactory extends DefaultMustacheFactory { private final Encoder encoder; + /** + * Initializes a CustomMustacheFactory object with a specified 
mediaType. + * + * @deprecated Use {@link #builder()} instead to retrieve a {@link Builder} object that can be used to create a factory. + */ + @Deprecated public CustomMustacheFactory(String mediaType) { - super(); - setObjectHandler(new CustomReflectionObjectHandler()); - this.encoder = createEncoder(mediaType); + this(mediaType, DEFAULT_DETECT_MISSING_PARAMS); } + /** + * Default constructor for the factory. + * + * @deprecated Use {@link #builder()} instead to retrieve a {@link Builder} object that can be used to create a factory. + */ + @Deprecated public CustomMustacheFactory() { - this(DEFAULT_MEDIA_TYPE); + this(DEFAULT_MEDIA_TYPE, DEFAULT_DETECT_MISSING_PARAMS); + } + + private CustomMustacheFactory(String mediaType, boolean detectMissingParams) { + super(); + setObjectHandler(new CustomReflectionObjectHandler(detectMissingParams)); + this.encoder = createEncoder(mediaType); } @Override @@ -95,6 +112,10 @@ public MustacheVisitor createMustacheVisitor() { return new CustomMustacheVisitor(this); } + public static Builder builder() { + return new Builder(); + } + class CustomMustacheVisitor extends DefaultMustacheVisitor { CustomMustacheVisitor(DefaultMustacheFactory df) { @@ -360,4 +381,34 @@ public void encode(String s, Writer writer) throws IOException { writer.write(URLEncoder.encode(s, StandardCharsets.UTF_8)); } } + + /** + * Build a new {@link CustomMustacheFactory} object. + */ + public static class Builder { + private String mediaType = DEFAULT_MEDIA_TYPE; + private boolean detectMissingParams = DEFAULT_DETECT_MISSING_PARAMS; + + private Builder() {} + + public Builder mediaType(String mediaType) { + this.mediaType = mediaType; + return this; + } + + /** + * Sets the behavior for handling missing parameters during template execution. + * + * @param detectMissingParams If true, an exception is thrown when executing the template with missing parameters. + * If false, the template gracefully handles missing parameters without throwing an exception. 
+ */ + public Builder detectMissingParams(boolean detectMissingParams) { + this.detectMissingParams = detectMissingParams; + return this; + } + + public CustomMustacheFactory build() { + return new CustomMustacheFactory(mediaType, detectMissingParams); + } + } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java index c1e87fdc0970e..491ec6c851342 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java @@ -8,7 +8,15 @@ package org.elasticsearch.script.mustache; +import com.github.mustachejava.Binding; +import com.github.mustachejava.Code; +import com.github.mustachejava.ObjectHandler; +import com.github.mustachejava.TemplateContext; +import com.github.mustachejava.codes.ValueCode; +import com.github.mustachejava.reflect.GuardedBinding; +import com.github.mustachejava.reflect.MissingWrapper; import com.github.mustachejava.reflect.ReflectionObjectHandler; +import com.github.mustachejava.util.Wrapper; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; @@ -19,10 +27,16 @@ import java.util.AbstractMap; import java.util.Collection; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.Set; final class CustomReflectionObjectHandler extends ReflectionObjectHandler { + private final boolean detectMissingParams; + + CustomReflectionObjectHandler(boolean detectMissingParams) { + this.detectMissingParams = detectMissingParams; + } @Override public Object coerce(Object object) { @@ -41,6 +55,11 @@ public Object coerce(Object object) { } } + @Override + public Binding createBinding(String name, TemplateContext tc, Code code) { + return detectMissingParams ? 
new DetectMissingParamsGuardedBinding(this, name, tc, code) : super.createBinding(name, tc, code); + } + @Override @SuppressWarnings("rawtypes") protected AccessibleObject findMember(Class sClass, String name) { @@ -59,6 +78,23 @@ protected AccessibleObject findMember(Class sClass, String name) { return null; } + static class DetectMissingParamsGuardedBinding extends GuardedBinding { + private final Code code; + + DetectMissingParamsGuardedBinding(ObjectHandler oh, String name, TemplateContext tc, Code code) { + super(oh, name, tc, code); + this.code = code; + } + + protected synchronized Wrapper getWrapper(String name, List scopes) { + Wrapper wrapper = super.getWrapper(name, scopes); + if (wrapper instanceof MissingWrapper && code instanceof ValueCode) { + throw new MustacheInvalidParameterException("Parameter [" + name + "] is missing"); + } + return wrapper; + } + } + static final class ArrayMap extends AbstractMap implements Iterable { private final Object array; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheInvalidParameterException.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheInvalidParameterException.java new file mode 100644 index 0000000000000..9aaf8cdae89ad --- /dev/null +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheInvalidParameterException.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.script.mustache; + +import com.github.mustachejava.MustacheException; + +public class MustacheInvalidParameterException extends MustacheException { + MustacheInvalidParameterException(String message) { + super(message, null, null); + } +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java index c6f60c48c4ab4..61102de0ab5a4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java @@ -38,6 +38,10 @@ * {@link Mustache} object can then be re-used for subsequent executions. */ public final class MustacheScriptEngine implements ScriptEngine { + /** + * Compiler option to enable detection of missing parameters. + */ + public static final String DETECT_MISSING_PARAMS_OPTION = "detect_missing_params"; private static final Logger logger = LogManager.getLogger(MustacheScriptEngine.class); public static final String NAME = "mustache"; @@ -72,10 +76,20 @@ public Set> getSupportedContexts() { } private static CustomMustacheFactory createMustacheFactory(Map options) { - if (options == null || options.isEmpty() || options.containsKey(Script.CONTENT_TYPE_OPTION) == false) { - return new CustomMustacheFactory(); + CustomMustacheFactory.Builder builder = CustomMustacheFactory.builder(); + if (options == null || options.isEmpty()) { + return builder.build(); + } + + if (options.containsKey(Script.CONTENT_TYPE_OPTION)) { + builder.mediaType(options.get(Script.CONTENT_TYPE_OPTION)); + } + + if (options.containsKey(DETECT_MISSING_PARAMS_OPTION)) { + builder.detectMissingParams(Boolean.valueOf(options.get(DETECT_MISSING_PARAMS_OPTION))); } - return new CustomMustacheFactory(options.get(Script.CONTENT_TYPE_OPTION)); + + return builder.build(); } 
@Override @@ -107,10 +121,17 @@ public String execute() { try { template.execute(writer, params); } catch (Exception e) { - logger.error(() -> format("Error running %s", template), e); + if (shouldLogException(e)) { + logger.error(() -> format("Error running %s", template), e); + } throw new GeneralScriptException("Error running " + template, e); } return writer.toString(); } + + public boolean shouldLogException(Throwable e) { + return e.getCause() != null && e.getCause() instanceof MustacheInvalidParameterException == false; + } } + } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 0d3e881e54a56..4896584d7aadf 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -9,6 +9,7 @@ import com.github.mustachejava.MustacheFactory; +import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.test.ESTestCase; @@ -18,10 +19,13 @@ import java.io.IOException; import java.io.StringWriter; +import java.util.Collections; import java.util.List; import java.util.Map; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; /** * Mustache based templating test @@ -33,7 +37,7 @@ public class MustacheScriptEngineTests extends ESTestCase { @Before public void setup() { qe = new MustacheScriptEngine(); - factory = new CustomMustacheFactory(); + factory = CustomMustacheFactory.builder().build(); } public void testSimpleParameterReplace() { @@ -196,6 +200,55 @@ public void testSimple() throws IOException { assertThat(TemplateScript.execute(), 
equalTo("{\"match_all\":{}}")); } + public void testDetectMissingParam() { + Map scriptOptions = Map.ofEntries(Map.entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, "true")); + + // fails when a param is missing and the DETECT_MISSING_PARAMS_OPTION option is set to true. + { + String source = "{\"match\": { \"field\": \"{{query_string}}\" }"; + TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions); + Map params = Collections.emptyMap(); + GeneralScriptException e = expectThrows(GeneralScriptException.class, () -> compiled.newInstance(params).execute()); + assertThat(e.getRootCause(), instanceOf(MustacheInvalidParameterException.class)); + assertThat(e.getRootCause().getMessage(), startsWith("Parameter [query_string] is missing")); + } + + // fails when params is null and the DETECT_MISSING_PARAMS_OPTION option is set to true. + { + String source = "{\"match\": { \"field\": \"{{query_string}}\" }"; + TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions); + GeneralScriptException e = expectThrows(GeneralScriptException.class, () -> compiled.newInstance(null).execute()); + assertThat(e.getRootCause(), instanceOf(MustacheInvalidParameterException.class)); + assertThat(e.getRootCause().getMessage(), startsWith("Parameter [query_string] is missing")); + } + + // works as expected when params are specified and the DETECT_MISSING_PARAMS_OPTION option is set to true + { + String source = "{\"match\": { \"field\": \"{{query_string}}\" }"; + TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions); + Map params = Map.ofEntries(Map.entry("query_string", "foo")); + assertThat(compiled.newInstance(params).execute(), equalTo("{\"match\": { \"field\": \"foo\" }")); + } + + // do not throw when using a missing param in the conditional when DETECT_MISSING_PARAMS_OPTION option is set to true + { + String source = "{\"match\": { \"field\": 
\"{{#query_string}}{{.}}{{/query_string}}\" }"; + TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions); + Map params = Map.of(); + assertThat(compiled.newInstance(params).execute(), equalTo("{\"match\": { \"field\": \"\" }")); + } + } + + public void testMissingParam() { + Map scriptOptions = Collections.emptyMap(); + String source = "{\"match\": { \"field\": \"{{query_string}}\" }"; + TemplateScript.Factory compiled = qe.compile(null, source, TemplateScript.CONTEXT, scriptOptions); + + // When the DETECT_MISSING_PARAMS_OPTION is not specified, missing variable is replaced with an empty string. + assertThat(compiled.newInstance(Collections.emptyMap()).execute(), equalTo("{\"match\": { \"field\": \"\" }")); + assertThat(compiled.newInstance(null).execute(), equalTo("{\"match\": { \"field\": \"\" }")); + } + public void testParseTemplateAsSingleStringWithConditionalClause() throws IOException { String templateString = """ { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java index 5ce5b0188771b..89dcf746d7927 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryRewriteContext; @@ -163,6 +164,11 @@ public int hashCode() { return Objects.hash(super.hashCode(), featureExtractorBuilders); } + @Override + public final String toString() { + return 
Strings.toString(this); + } + @Override public boolean isTargetTypeSupported(TargetType targetType) { return TargetType.REGRESSION.equals(targetType); diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 2373dc4d54c99..22cdb752d1e8d 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -92,6 +92,7 @@ dependencies { testImplementation project(path: xpackModule('wildcard')) // ml deps api project(':libs:elasticsearch-grok') + api project(':modules:lang-mustache') api "org.apache.commons:commons-math3:3.6.1" api "com.ibm.icu:icu4j:${versions.icu4j}" api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 3268c15879b92..fca019a6fc689 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -3,6 +3,10 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-java-rest-test' +dependencies { + javaRestTestImplementation(project(':modules:lang-mustache')) +} + testClusters.configureEach { testDistribution = 'DEFAULT' numberOfNodes = 3 diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle index 97a7b0eed73ad..232700d5f84aa 100644 --- a/x-pack/plugin/ml/qa/disabled/build.gradle +++ b/x-pack/plugin/ml/qa/disabled/build.gradle @@ -2,10 +2,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-java-rest-test' -//dependencies { -// testImplementation project(":x-pack:plugin:core") -// testImplementation project(path: xpackModule('ml')) -//} +dependencies { + javaRestTestImplementation(project(':modules:lang-mustache')) +} testClusters.configureEach { testDistribution = 'DEFAULT' diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle 
index ae537f865e65f..db53b9aec7f1f 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { @@ -6,6 +5,7 @@ dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('ml')))) javaRestTestImplementation(testArtifact(project(xpackModule('security')))) javaRestTestImplementation project(path: ':modules:ingest-common') + javaRestTestImplementation(project(':modules:lang-mustache')) javaRestTestImplementation project(path: ':modules:reindex') javaRestTestImplementation project(path: ':modules:transport-netty4') javaRestTestImplementation project(path: xpackModule('autoscaling')) diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle index eb86ca600d75f..6979ec4dcbd31 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle @@ -2,6 +2,10 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-java-rest-test' +dependencies { + javaRestTestImplementation(project(':modules:lang-mustache')) +} + testClusters.configureEach { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'false' diff --git a/x-pack/plugin/ml/src/main/java/module-info.java b/x-pack/plugin/ml/src/main/java/module-info.java index a73c9bdfa32b4..0f3fdd836feca 100644 --- a/x-pack/plugin/ml/src/main/java/module-info.java +++ b/x-pack/plugin/ml/src/main/java/module-info.java @@ -17,6 +17,7 @@ requires org.elasticsearch.grok; requires org.elasticsearch.server; requires org.elasticsearch.xcontent; + requires org.elasticsearch.mustache; requires org.apache.httpcomponents.httpcore; requires org.apache.httpcomponents.httpclient; requires org.apache.httpcomponents.httpasyncclient; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java index 1443ccd687620..42f7d8cf0a3b3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java @@ -9,10 +9,14 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; +import org.elasticsearch.script.mustache.MustacheInvalidParameterException; +import org.elasticsearch.script.mustache.MustacheScriptEngine; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -37,11 +41,15 @@ import java.util.Map; import java.util.Optional; +import static java.util.Map.entry; import static org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; public class LearnToRankService { + private static final Map SCRIPT_OPTIONS = Map.ofEntries( + entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, Boolean.TRUE.toString()) + ); private final ModelLoadingService modelLoadingService; private final TrainedModelProvider trainedModelProvider; private final ScriptService scriptService; @@ -126,11 +134,6 @@ private LearnToRankConfig applyParams(LearnToRankConfig config, Map featureExtractorBuilders = new ArrayList<>(); for 
(LearnToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { @@ -176,15 +179,25 @@ private QueryExtractorBuilder applyParams(QueryExtractorBuilder queryExtractorBu return queryExtractorBuilder; } - Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, templateSource, Collections.emptyMap()); - String parsedTemplate = scriptService.compile(script, TemplateScript.CONTEXT).newInstance(params).execute(); - // TODO: handle missing params. - XContentParser parser = XContentType.JSON.xContent().createParser(parserConfiguration, parsedTemplate); - - return new QueryExtractorBuilder( - queryExtractorBuilder.featureName(), - QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT) - ); + try { + Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, templateSource, SCRIPT_OPTIONS, Collections.emptyMap()); + String parsedTemplate = scriptService.compile(script, TemplateScript.CONTEXT).newInstance(params).execute(); + XContentParser parser = XContentType.JSON.xContent().createParser(parserConfiguration, parsedTemplate); + + return new QueryExtractorBuilder( + queryExtractorBuilder.featureName(), + QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT) + ); + } catch (GeneralScriptException e) { + if (e.getRootCause().getClass().getName().equals(MustacheInvalidParameterException.class.getName())) { + // Can't use instanceof since it return unexpected result. 
+ return new QueryExtractorBuilder( + queryExtractorBuilder.featureName(), + QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder()) + ); + } + throw e; + } } private String templateSource(QueryProvider queryProvider) throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java index e4d0225637fa1..a2cd0ff8856c6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java @@ -7,10 +7,12 @@ package org.elasticsearch.xpack.ml.inference.ltr; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -26,6 +28,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.QueryProviderTests; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -98,7 +101,8 @@ public void testLoadLearnToRankConfig() throws Exception { ); ActionListener listener = mock(ActionListener.class); learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Collections.emptyMap(), listener); - 
assertBusy(() -> verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig()))); + + verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); } @SuppressWarnings("unchecked") @@ -111,7 +115,8 @@ public void testLoadMissingLearnToRankConfig() throws Exception { ); ActionListener listener = mock(ActionListener.class); learnToRankService.loadLearnToRankConfig("non-existing-model", Collections.emptyMap(), listener); - assertBusy(() -> verify(listener).onFailure(isA(ResourceNotFoundException.class))); + + verify(listener).onFailure(isA(ResourceNotFoundException.class)); } @SuppressWarnings("unchecked") @@ -124,7 +129,8 @@ public void testLoadBadLearnToRankConfig() throws Exception { ); ActionListener listener = mock(ActionListener.class); learnToRankService.loadLearnToRankConfig(BAD_MODEL, Collections.emptyMap(), listener); - assertBusy(() -> verify(listener).onFailure(isA(ElasticsearchStatusException.class))); + + verify(listener).onFailure(isA(ElasticsearchStatusException.class)); } @SuppressWarnings("unchecked") @@ -136,27 +142,48 @@ public void testLoadLearnToRankConfigWithTemplate() throws Exception { xContentRegistry() ); - // When no parameters are provided we expect the templated queries not being part of the retrieved config. - ActionListener noParamsListener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(TEMPLATED_GOOD_MODEL, Collections.emptyMap(), noParamsListener); - assertBusy(() -> verify(noParamsListener).onResponse(argThat(retrievedConfig -> { - assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(2)); - assertEquals(retrievedConfig, TEMPLATED_GOOD_MODEL_CONFIG.getInferenceConfig()); - return true; - }))); + // When no parameters are provided we expect query to be rewritten into a match_none query. 
+ { + ActionListener listener = mock(ActionListener.class); + SetOnce retrievedConfig = new SetOnce<>(); + + doAnswer(i -> { + retrievedConfig.set(i.getArgument(0, LearnToRankConfig.class)); + return null; + }).when(listener).onResponse(any()); + learnToRankService.loadLearnToRankConfig(TEMPLATED_GOOD_MODEL, null, listener); + + assertNotNull(retrievedConfig.get()); + assertThat(retrievedConfig.get().getFeatureExtractorBuilders(), hasSize(2)); + + assertEquals( + retrievedConfig.get(), + LearnToRankConfig.builder((LearnToRankConfig) TEMPLATED_GOOD_MODEL_CONFIG.getInferenceConfig()) + .setLearnToRankFeatureExtractorBuilders( + List.of( + new QueryExtractorBuilder("feature_1", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())), + new QueryExtractorBuilder("feature_2", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())) + ) + ) + .build() + ); + } // Now testing when providing all the params of the template. - ActionListener allParamsListener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig( - TEMPLATED_GOOD_MODEL, - Map.ofEntries(Map.entry("foo_param", "foo"), Map.entry("bar_param", "bar")), - allParamsListener - ); - assertBusy(() -> verify(allParamsListener).onResponse(argThat(retrievedConfig -> { - assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(2)); - assertEquals(retrievedConfig, GOOD_MODEL_CONFIG.getInferenceConfig()); - return true; - }))); + { + ActionListener listener = mock(ActionListener.class); + learnToRankService.loadLearnToRankConfig( + TEMPLATED_GOOD_MODEL, + Map.ofEntries(Map.entry("foo_param", "foo"), Map.entry("bar_param", "bar")), + listener + ); + + verify(listener).onResponse(argThat(retrievedConfig -> { + assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(2)); + assertEquals(retrievedConfig, GOOD_MODEL_CONFIG.getInferenceConfig()); + return true; + })); + } } @Override From bc0751d3928902fd83fd124de91a23000fa75b2c Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: 
Fri, 1 Dec 2023 12:18:49 +0200 Subject: [PATCH 127/263] [CI] Fix for SearchCancellationIT (#102774) --- .../search/SearchCancellationIT.java | 11 ++-- .../AbstractSearchCancellationTestCase.java | 57 +++++++++++-------- 2 files changed, 36 insertions(+), 32 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index ad610954e86b6..c41984b468b11 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.search; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.MultiSearchResponse; @@ -50,8 +49,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102257") +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class SearchCancellationIT extends AbstractSearchCancellationTestCase { @Override @@ -288,12 +286,11 @@ public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception assertTrue("All SearchShardTasks should then be cancelled", shardQueryTask.isCancelled()); } }, 30, TimeUnit.SECONDS); - shardTaskLatch.countDown(); // unblock the shardTasks, allowing the test to conclude. } finally { + shardTaskLatch.countDown(); // unblock the shardTasks, allowing the test to conclude. 
searchThread.join(); - for (ScriptedBlockPlugin plugin : plugins) { - plugin.setBeforeExecution(() -> {}); - } + plugins.forEach(plugin -> plugin.setBeforeExecution(() -> {})); + searchShardBlockingPlugins.forEach(plugin -> plugin.setRunOnNewReaderContext((ReaderContext c) -> {})); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java index 7049954dc43fa..ba0921972778f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java @@ -35,7 +35,8 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -152,7 +153,7 @@ public static class ScriptedBlockPlugin extends MockScriptPlugin { private final AtomicInteger hits = new AtomicInteger(); - private final AtomicBoolean shouldBlock = new AtomicBoolean(true); + private final Semaphore shouldBlock = new Semaphore(Integer.MAX_VALUE); private final AtomicReference beforeExecution = new AtomicReference<>(); @@ -161,11 +162,16 @@ public void reset() { } public void disableBlock() { - shouldBlock.set(false); + shouldBlock.release(Integer.MAX_VALUE); } public void enableBlock() { - shouldBlock.set(true); + try { + shouldBlock.acquire(Integer.MAX_VALUE); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new AssertionError(e); + } } public void setBeforeExecution(Runnable runnable) { @@ -196,6 +202,23 @@ public Map, Object>> pluginScripts() { ); } + public void logIfBlocked(String logMessage) { + if (shouldBlock.tryAcquire(1) 
== false) { + LogManager.getLogger(AbstractSearchCancellationTestCase.class).info(logMessage); + } else { + shouldBlock.release(1); + } + } + + public void waitForLock(int timeout, TimeUnit timeUnit) { + try { + assertTrue(shouldBlock.tryAcquire(timeout, timeUnit)); + shouldBlock.release(1); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + private Object searchBlockScript(Map params) { final Runnable runnable = beforeExecution.get(); if (runnable != null) { @@ -204,11 +227,7 @@ private Object searchBlockScript(Map params) { LeafStoredFieldsLookup fieldsLookup = (LeafStoredFieldsLookup) params.get("_fields"); LogManager.getLogger(AbstractSearchCancellationTestCase.class).info("Blocking on the document {}", fieldsLookup.get("_id")); hits.incrementAndGet(); - try { - assertBusy(() -> assertFalse(shouldBlock.get())); - } catch (Exception e) { - throw new RuntimeException(e); - } + waitForLock(10, TimeUnit.SECONDS); return true; } @@ -226,15 +245,9 @@ private Object blockScript(Map params) { if (runnable != null) { runnable.run(); } - if (shouldBlock.get()) { - LogManager.getLogger(AbstractSearchCancellationTestCase.class).info("Blocking in reduce"); - } + logIfBlocked("Blocking in reduce"); hits.incrementAndGet(); - try { - assertBusy(() -> assertFalse(shouldBlock.get())); - } catch (Exception e) { - throw new RuntimeException(e); - } + waitForLock(10, TimeUnit.SECONDS); return 42; } @@ -243,15 +256,9 @@ private Object mapBlockScript(Map params) { if (runnable != null) { runnable.run(); } - if (shouldBlock.get()) { - LogManager.getLogger(AbstractSearchCancellationTestCase.class).info("Blocking in map"); - } + logIfBlocked("Blocking in map"); hits.incrementAndGet(); - try { - assertBusy(() -> assertFalse(shouldBlock.get())); - } catch (Exception e) { - throw new RuntimeException(e); - } + waitForLock(10, TimeUnit.SECONDS); return 1; } From bc17acdada7b67dfbc9cd2c9b10648d8bd188425 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 1 Dec 2023 
11:23:53 +0100 Subject: [PATCH 128/263] Port repository-url to new TestFramework (#102588) removes docker-compose dependency from url-fixture --- modules/repository-url/build.gradle | 19 +-- .../RepositoryURLClientYamlTestSuiteIT.java | 22 +++ test/fixtures/url-fixture/Dockerfile | 14 -- test/fixtures/url-fixture/build.gradle | 21 --- test/fixtures/url-fixture/docker-compose.yml | 15 -- .../src/main/java/fixture/url/URLFixture.java | 63 ++++---- .../test/fixture/AbstractHttpFixture.java | 145 ++++++++++-------- 7 files changed, 146 insertions(+), 153 deletions(-) delete mode 100644 test/fixtures/url-fixture/Dockerfile delete mode 100644 test/fixtures/url-fixture/docker-compose.yml diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index 7b671802f3a2a..2850aee68a2fb 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -8,12 +8,9 @@ import org.elasticsearch.gradle.PropertyNormalization -apply plugin: 'elasticsearch.legacy-yaml-rest-test' -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' -apply plugin: 'elasticsearch.test.fixtures' - -final Project fixture = project(':test:fixtures:url-fixture') esplugin { description 'Module for URL repository' @@ -32,6 +29,8 @@ dependencies { api "commons-logging:commons-logging:${versions.commonslogging}" api "commons-codec:commons-codec:${versions.commonscodec}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" + yamlRestTestImplementation project(':test:fixtures:url-fixture') + internalClusterTestImplementation project(':test:fixtures:url-fixture') } tasks.named("thirdPartyAudit").configure { @@ -45,15 +44,7 @@ tasks.named("thirdPartyAudit").configure { ) } -testFixtures.useFixture(fixture.path, 'url-fixture') - -def fixtureAddress = { fixtureName -> - int ephemeralPort = 
fixture.postProcessFixture.ext."test.fixtures.${fixtureName}.tcp.80" - assert ephemeralPort > 0 - 'http://127.0.0.1:' + ephemeralPort -} - -File repositoryDir = fixture.fsRepositoryDir as File +//File repositoryDir = fixture.fsRepositoryDir as File testClusters.configureEach { // repositoryDir is used by a FS repository to create snapshots diff --git a/modules/repository-url/src/yamlRestTest/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 0958276656a81..a5b1a48f94ac9 100644 --- a/modules/repository-url/src/yamlRestTest/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/yamlRestTest/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -8,6 +8,8 @@ package org.elasticsearch.repositories.url; +import fixture.url.URLFixture; + import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; @@ -22,11 +24,15 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import java.io.IOException; import java.net.InetAddress; @@ -42,6 +48,22 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { + public static final URLFixture urlFixture = new URLFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + 
.module("repository-url") + .setting("path.repo", urlFixture::getRepositoryDir) + .setting("repositories.url.allowed_urls", () -> "http://snapshot.test*, " + urlFixture.getAddress()) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(urlFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + public RepositoryURLClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } diff --git a/test/fixtures/url-fixture/Dockerfile b/test/fixtures/url-fixture/Dockerfile deleted file mode 100644 index d6c1443fa1f85..0000000000000 --- a/test/fixtures/url-fixture/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM openjdk:17.0.2 - -ARG port -ARG workingDir -ARG repositoryDir - -ENV URL_FIXTURE_PORT=${port} -ENV URL_FIXTURE_WORKING_DIR=${workingDir} -ENV URL_FIXTURE_REPO_DIR=${repositoryDir} - -ENTRYPOINT exec java -classpath "/fixture/shared/*" \ - fixture.url.URLFixture "$URL_FIXTURE_PORT" "$URL_FIXTURE_WORKING_DIR" "$URL_FIXTURE_REPO_DIR" - -EXPOSE $port diff --git a/test/fixtures/url-fixture/build.gradle b/test/fixtures/url-fixture/build.gradle index d7d9fd2964c19..d8bcce6ce8211 100644 --- a/test/fixtures/url-fixture/build.gradle +++ b/test/fixtures/url-fixture/build.gradle @@ -6,30 +6,9 @@ * Side Public License, v 1. 
*/ apply plugin: 'elasticsearch.java' -apply plugin: 'elasticsearch.test.fixtures' - description = 'Fixture for URL external service' -tasks.named("test").configure { enabled = false } dependencies { api project(':server') api project(':test:framework') } - -// These directories are shared between the URL repository and the FS repository in integration tests -project.ext { - fsRepositoryDir = file("${testFixturesDir}/fs-repository") -} - -tasks.named("preProcessFixture").configure { - dependsOn "jar", configurations.runtimeClasspath - doLast { - file("${testFixturesDir}/shared").mkdirs() - project.copy { - from jar - from configurations.runtimeClasspath - into "${testFixturesDir}/shared" - } - project.fsRepositoryDir.mkdirs() - } -} diff --git a/test/fixtures/url-fixture/docker-compose.yml b/test/fixtures/url-fixture/docker-compose.yml deleted file mode 100644 index edfc879b1cec3..0000000000000 --- a/test/fixtures/url-fixture/docker-compose.yml +++ /dev/null @@ -1,15 +0,0 @@ -version: '3' -services: - url-fixture: - build: - context: . 
- args: - port: 80 - workingDir: "/fixture/work" - repositoryDir: "/fixture/repo" - volumes: - - ./testfixtures_shared/shared:/fixture/shared - - ./testfixtures_shared/fs-repository:/fixture/repo - - ./testfixtures_shared/work:/fixture/work - ports: - - "80" diff --git a/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java b/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java index 3f6eed903765a..5192140f1af45 100644 --- a/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java +++ b/test/fixtures/url-fixture/src/main/java/fixture/url/URLFixture.java @@ -7,15 +7,17 @@ */ package fixture.url; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.fixture.AbstractHttpFixture; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; import java.io.IOException; import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; @@ -25,32 +27,17 @@ * This {@link URLFixture} exposes a filesystem directory over HTTP. It is used in repository-url * integration tests to expose a directory created by a regular FS repository. 
*/ -public class URLFixture extends AbstractHttpFixture { +public class URLFixture extends AbstractHttpFixture implements TestRule { private static final Pattern RANGE_PATTERN = Pattern.compile("bytes=(\\d+)-(\\d+)$"); - private final Path repositoryDir; + private final TemporaryFolder temporaryFolder; + private Path repositoryDir; /** * Creates a {@link URLFixture} */ - private URLFixture(final int port, final String workingDir, final String repositoryDir) { - super(workingDir, port); - this.repositoryDir = dir(repositoryDir); - } - - public static void main(String[] args) throws Exception { - if (args == null || args.length != 3) { - throw new IllegalArgumentException("URLFixture "); - } - String workingDirectory = args[1]; - if (Files.exists(dir(workingDirectory)) == false) { - throw new IllegalArgumentException("Configured working directory " + workingDirectory + " does not exist"); - } - String repositoryDirectory = args[2]; - if (Files.exists(dir(repositoryDirectory)) == false) { - throw new IllegalArgumentException("Configured repository directory " + repositoryDirectory + " does not exist"); - } - final URLFixture fixture = new URLFixture(Integer.parseInt(args[0]), workingDirectory, repositoryDirectory); - fixture.listen(InetAddress.getByName("0.0.0.0"), false); + public URLFixture() { + super(); + this.temporaryFolder = new TemporaryFolder(); } @Override @@ -107,8 +94,32 @@ private AbstractHttpFixture.Response handleGetRequest(Request request) throws IO } } - @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here") - private static Path dir(final String dir) { - return Paths.get(dir); + @Override + protected void before() throws Throwable { + this.temporaryFolder.create(); + this.repositoryDir = temporaryFolder.newFolder("repoDir").toPath(); + InetSocketAddress inetSocketAddress = resolveAddress("0.0.0.0", 0); + listen(inetSocketAddress, false); + } + + public String getRepositoryDir() { + if (repositoryDir == null) { + throw new 
IllegalStateException("Rule has not been started yet"); + } + return repositoryDir.toFile().getAbsolutePath(); + } + + private static InetSocketAddress resolveAddress(String address, int port) { + try { + return new InetSocketAddress(InetAddress.getByName(address), port); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + } + + @Override + protected void after() { + super.stop(); + this.temporaryFolder.delete(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java index 87b8f5f89ffad..8e7fae85e57f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java +++ b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; +import org.junit.rules.ExternalResource; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -40,7 +41,7 @@ * Base class for test fixtures that requires a {@link HttpServer} to work. 
*/ @SuppressForbidden(reason = "uses httpserver by design") -public abstract class AbstractHttpFixture { +public abstract class AbstractHttpFixture extends ExternalResource { protected static final Map TEXT_PLAIN_CONTENT_TYPE = contentType("text/plain; charset=utf-8"); protected static final Map JSON_CONTENT_TYPE = contentType("application/json; charset=utf-8"); @@ -51,8 +52,9 @@ public abstract class AbstractHttpFixture { private final AtomicLong requests = new AtomicLong(0); /** Current working directory of the fixture **/ - private final Path workingDirectory; - private final int port; + private Path workingDirectory; + private int port; + private HttpServer httpServer; protected AbstractHttpFixture(final String workingDir) { this(workingDir, 0); @@ -63,6 +65,8 @@ protected AbstractHttpFixture(final String workingDir, int port) { this.workingDirectory = PathUtils.get(Objects.requireNonNull(workingDir)); } + public AbstractHttpFixture() {} + /** * Opens a {@link HttpServer} and start listening on a provided or random port. 
*/ @@ -75,85 +79,100 @@ public final void listen() throws IOException, InterruptedException { */ public final void listen(InetAddress inetAddress, boolean exposePidAndPort) throws IOException, InterruptedException { final InetSocketAddress socketAddress = new InetSocketAddress(inetAddress, port); - final HttpServer httpServer = HttpServer.create(socketAddress, 0); + listenAndWait(socketAddress, exposePidAndPort); + } + public final void listenAndWait(InetSocketAddress socketAddress, boolean exposePidAndPort) throws IOException, InterruptedException { try { - if (exposePidAndPort) { - /// Writes the PID of the current Java process in a `pid` file located in the working directory - writeFile(workingDirectory, "pid", ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); - - final String addressAndPort = addressToString(httpServer.getAddress()); - // Writes the address and port of the http server in a `ports` file located in the working directory - writeFile(workingDirectory, "ports", addressAndPort); - } - - httpServer.createContext("/", exchange -> { - try { - Response response; + listen(socketAddress, exposePidAndPort); + // Wait to be killed + Thread.sleep(Long.MAX_VALUE); + } finally { + stop(); + } + } - // Check if this is a request made by the AntFixture - final String userAgent = exchange.getRequestHeaders().getFirst("User-Agent"); - if (userAgent != null - && userAgent.startsWith("Apache Ant") - && "GET".equals(exchange.getRequestMethod()) - && "/".equals(exchange.getRequestURI().getPath())) { - response = new Response(200, TEXT_PLAIN_CONTENT_TYPE, "OK".getBytes(UTF_8)); + public final void listen(InetSocketAddress socketAddress, boolean exposePidAndPort) throws IOException, InterruptedException { + httpServer = HttpServer.create(socketAddress, 0); + if (exposePidAndPort) { + /// Writes the PID of the current Java process in a `pid` file located in the working directory + writeFile(workingDirectory, "pid", 
ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); - } else { - try { - final long requestId = requests.getAndIncrement(); - final String method = exchange.getRequestMethod(); + final String addressAndPort = addressToString(httpServer.getAddress()); + // Writes the address and port of the http server in a `ports` file located in the working directory + writeFile(workingDirectory, "ports", addressAndPort); + } - final Map headers = new HashMap<>(); - for (Map.Entry> header : exchange.getRequestHeaders().entrySet()) { - headers.put(header.getKey(), exchange.getRequestHeaders().getFirst(header.getKey())); - } + httpServer.createContext("/", exchange -> { + try { + Response response; + + // Check if this is a request made by the AntFixture + final String userAgent = exchange.getRequestHeaders().getFirst("User-Agent"); + if (userAgent != null + && userAgent.startsWith("Apache Ant") + && "GET".equals(exchange.getRequestMethod()) + && "/".equals(exchange.getRequestURI().getPath())) { + response = new Response(200, TEXT_PLAIN_CONTENT_TYPE, "OK".getBytes(UTF_8)); + + } else { + try { + final long requestId = requests.getAndIncrement(); + final String method = exchange.getRequestMethod(); + + final Map headers = new HashMap<>(); + for (Map.Entry> header : exchange.getRequestHeaders().entrySet()) { + headers.put(header.getKey(), exchange.getRequestHeaders().getFirst(header.getKey())); + } - final ByteArrayOutputStream body = new ByteArrayOutputStream(); - try (InputStream requestBody = exchange.getRequestBody()) { - final byte[] buffer = new byte[1024]; - int i; - while ((i = requestBody.read(buffer, 0, buffer.length)) != -1) { - body.write(buffer, 0, i); - } - body.flush(); + final ByteArrayOutputStream body = new ByteArrayOutputStream(); + try (InputStream requestBody = exchange.getRequestBody()) { + final byte[] buffer = new byte[1024]; + int i; + while ((i = requestBody.read(buffer, 0, buffer.length)) != -1) { + body.write(buffer, 0, i); } + body.flush(); + } 
- final Request request = new Request(requestId, method, exchange.getRequestURI(), headers, body.toByteArray()); - response = handle(request); + final Request request = new Request(requestId, method, exchange.getRequestURI(), headers, body.toByteArray()); + response = handle(request); - } catch (Exception e) { - final String error = e.getMessage() != null ? e.getMessage() : "Exception when processing the request"; - response = new Response(500, singletonMap("Content-Type", "text/plain; charset=utf-8"), error.getBytes(UTF_8)); - } + } catch (Exception e) { + final String error = e.getMessage() != null ? e.getMessage() : "Exception when processing the request"; + response = new Response(500, singletonMap("Content-Type", "text/plain; charset=utf-8"), error.getBytes(UTF_8)); } + } - if (response == null) { - response = new Response(400, TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); - } + if (response == null) { + response = new Response(400, TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); + } - response.headers.forEach((k, v) -> exchange.getResponseHeaders().put(k, singletonList(v))); - if (response.body.length > 0) { - exchange.sendResponseHeaders(response.status, response.body.length); - exchange.getResponseBody().write(response.body); - } else { - exchange.sendResponseHeaders(response.status, -1); - } - } finally { - exchange.close(); + response.headers.forEach((k, v) -> exchange.getResponseHeaders().put(k, singletonList(v))); + if (response.body.length > 0) { + exchange.sendResponseHeaders(response.status, response.body.length); + exchange.getResponseBody().write(response.body); + } else { + exchange.sendResponseHeaders(response.status, -1); } - }); - httpServer.start(); + } finally { + exchange.close(); + } + }); + httpServer.start(); + } - // Wait to be killed - Thread.sleep(Long.MAX_VALUE); + protected abstract Response handle(Request request) throws IOException; - } finally { + protected void stop() { + if (httpServer != null) { httpServer.stop(0); } } - protected abstract 
Response handle(Request request) throws IOException; + public String getAddress() { + return "http://127.0.0.1:" + httpServer.getAddress().getPort(); + } @FunctionalInterface public interface RequestHandler { From 5a635a2fd1c19be04fc9fc2776f63ac010fc0fe7 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Fri, 1 Dec 2023 10:46:22 +0000 Subject: [PATCH 129/263] Support for GET all models and by task type in the _inference API (#102806) Support wildcard GET by either task type or for all models. --- docs/changelog/102806.yaml | 5 + .../inference/InferenceService.java | 15 +- .../org/elasticsearch/inference/Model.java | 8 + .../action/GetInferenceModelAction.java | 9 +- .../inference/InferenceBaseRestTest.java | 131 +++++++++ .../xpack/inference/InferenceCrudIT.java | 69 +++++ .../inference/MockInferenceServiceIT.java | 107 +------- .../mock/TestInferenceServiceExtension.java | 15 +- .../integration/ModelRegistryIT.java | 254 ++++++++++++++++-- .../TransportGetInferenceModelAction.java | 78 +++++- .../action/TransportInferenceAction.java | 13 +- .../inference/registry/ModelRegistry.java | 148 +++++++++- .../rest/RestGetInferenceModelAction.java | 18 +- .../inference/services/MapParsingUtils.java | 9 +- .../services/elser/ElserMlNodeService.java | 7 +- .../elser/HuggingFaceElserService.java | 10 +- .../services/openai/OpenAiService.java | 28 +- .../action/GetInferenceModelRequestTests.java | 6 +- .../registry/ModelRegistryTests.java | 81 +++++- .../elser/ElserMlNodeServiceTests.java | 21 +- .../services/openai/OpenAiServiceTests.java | 56 +++- 21 files changed, 901 insertions(+), 187 deletions(-) create mode 100644 docs/changelog/102806.yaml create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java diff --git 
a/docs/changelog/102806.yaml b/docs/changelog/102806.yaml new file mode 100644 index 0000000000000..faa971ec1d879 --- /dev/null +++ b/docs/changelog/102806.yaml @@ -0,0 +1,5 @@ +pr: 102806 +summary: Support for GET all models and by task type in the `_inference` API +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 80feebd435cb1..2f83310ea2388 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -55,7 +55,20 @@ default void init(Client client) {} * @param secrets Sensitive configuration options (e.g. api key) * @return The parsed {@link Model} */ - Model parsePersistedConfig(String modelId, TaskType taskType, Map config, Map secrets); + Model parsePersistedConfigWithSecrets(String modelId, TaskType taskType, Map config, Map secrets); + + /** + * Parse model configuration from {@code config map} from persisted storage and return the parsed {@link Model}. + * This function modifies {@code config map}, fields are removed from the map as they are read. + * + * If the map contains unrecognized configuration options, no error is thrown. + * + * @param modelId Model Id + * @param taskType The model task type + * @param config Configuration options + * @return The parsed {@link Model} + */ + Model parsePersistedConfig(String modelId, TaskType taskType, Map config); /** * Perform inference on the model. 
diff --git a/server/src/main/java/org/elasticsearch/inference/Model.java b/server/src/main/java/org/elasticsearch/inference/Model.java index eedb67a8111e5..02be39d8a653d 100644 --- a/server/src/main/java/org/elasticsearch/inference/Model.java +++ b/server/src/main/java/org/elasticsearch/inference/Model.java @@ -27,6 +27,14 @@ public Model(ModelConfigurations configurations) { this(configurations, new ModelSecrets()); } + public String getModelId() { + return configurations.getModelId(); + } + + public TaskType getTaskType() { + return configurations.getTaskType(); + } + /** * Returns the model's non-sensitive configurations (e.g. service name). */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index 6e2b3a7a89e32..0343206994d2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -38,14 +38,9 @@ public static class Request extends AcknowledgedRequest putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { + String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("PUT", endpoint); + request.setJsonEntity(modelConfig); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map getModels(String modelId, TaskType taskType) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map getAllModels() throws IOException { + var endpoint = 
Strings.format("_inference/_all"); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + protected Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + var request = new Request("POST", endpoint); + + var bodyBuilder = new StringBuilder("{\"input\": ["); + for (var in : input) { + bodyBuilder.append('"').append(in).append('"').append(','); + } + // remove last comma + bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); + bodyBuilder.append("]}"); + + request.setJsonEntity(bodyBuilder.toString()); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + @SuppressWarnings("unchecked") + protected void assertNonEmptyInferenceResults(Map resultMap, int expectedNumberOfResults, TaskType taskType) { + if (taskType == TaskType.SPARSE_EMBEDDING) { + var results = (List>) resultMap.get(TaskType.SPARSE_EMBEDDING.toString()); + assertThat(results, hasSize(expectedNumberOfResults)); + } else { + fail("test with task type [" + taskType + "] are not supported yet"); + } + } + + protected static void assertOkOrCreated(Response response) throws IOException { + int statusCode = response.getStatusLine().getStatusCode(); + // Once EntityUtils.toString(entity) is called the entity cannot be reused. + // Avoid that call with check here. 
+ if (statusCode == 200 || statusCode == 201) { + return; + } + + String responseStr = EntityUtils.toString(response.getEntity()); + assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java new file mode 100644 index 0000000000000..61278fcae6d94 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.inference.TaskType; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasSize; + +public class InferenceCrudIT extends InferenceBaseRestTest { + + @SuppressWarnings("unchecked") + public void testGet() throws IOException { + for (int i = 0; i < 5; i++) { + putModel("se_model_" + i, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + } + for (int i = 0; i < 4; i++) { + putModel("te_model_" + i, mockServiceModelConfig(), TaskType.TEXT_EMBEDDING); + } + + var getAllModels = (List>) getAllModels().get("models"); + assertThat(getAllModels, hasSize(9)); + + var getSparseModels = (List>) getModels("_all", TaskType.SPARSE_EMBEDDING).get("models"); + assertThat(getSparseModels, hasSize(5)); + for (var sparseModel : getSparseModels) { + 
assertEquals("sparse_embedding", sparseModel.get("task_type")); + } + + var getDenseModels = (List>) getModels("_all", TaskType.TEXT_EMBEDDING).get("models"); + assertThat(getDenseModels, hasSize(4)); + for (var denseModel : getDenseModels) { + assertEquals("text_embedding", denseModel.get("task_type")); + } + + var singleModel = (List>) getModels("se_model_1", TaskType.SPARSE_EMBEDDING).get("models"); + assertThat(singleModel, hasSize(1)); + assertEquals("se_model_1", singleModel.get(0).get("model_id")); + } + + public void testGetModelWithWrongTaskType() throws IOException { + putModel("sparse_embedding_model", mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows(ResponseException.class, () -> getModels("sparse_embedding_model", TaskType.TEXT_EMBEDDING)); + assertThat( + e.getMessage(), + containsString("Requested task type [text_embedding] does not match the model's task type [sparse_embedding]") + ); + } + + @SuppressWarnings("unchecked") + public void testGetModelWithAnyTaskType() throws IOException { + String modelId = "sparse_embedding_model"; + putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var singleModel = (List>) getModels(modelId, TaskType.ANY).get("models"); + System.out.println("MODEL" + singleModel); + assertEquals(modelId, singleModel.get(0).get("model_id")); + assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get(0).get("task_type")); + } +} diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java index 058c921152afc..f8abfd45a8566 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java +++ 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java @@ -7,63 +7,13 @@ package org.elasticsearch.xpack.inference; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.junit.ClassRule; import java.io.IOException; import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; - -public class MockInferenceServiceIT extends ESRestTestCase { - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.license.self_generated.type", "trial") - .setting("xpack.security.enabled", "true") - .plugin("org.elasticsearch.xpack.inference.mock.TestInferenceServicePlugin") - .user("x_pack_rest_user", "x-pack-test-password") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Override - protected Settings restClientSettings() { - String token = basicAuthHeaderValue("x_pack_rest_user", new SecureString("x-pack-test-password".toCharArray())); - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); - } - - static String mockServiceModelConfig() { - return Strings.format(""" - { - "service": "test_service", - "service_settings": { - "model": "my_model", - "api_key": "abc64" - }, - 
"task_settings": { - "temperature": 3 - } - } - """); - } +public class MockInferenceServiceIT extends InferenceBaseRestTest { @SuppressWarnings("unchecked") public void testMockService() throws IOException { @@ -113,59 +63,4 @@ public void testMockService_DoesNotReturnSecretsInGetResponse() throws IOExcepti assertNull(putServiceSettings.get("api_key")); assertNotNull(putServiceSettings.get("model")); } - - private Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { - String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - var request = new Request("PUT", endpoint); - request.setJsonEntity(modelConfig); - var reponse = client().performRequest(request); - assertOkWithErrorMessage(reponse); - return entityAsMap(reponse); - } - - public Map getModels(String modelId, TaskType taskType) throws IOException { - var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - var request = new Request("GET", endpoint); - var reponse = client().performRequest(request); - assertOkWithErrorMessage(reponse); - return entityAsMap(reponse); - } - - private Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { - var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - var request = new Request("POST", endpoint); - - var bodyBuilder = new StringBuilder("{\"input\": ["); - for (var in : input) { - bodyBuilder.append('"').append(in).append('"').append(','); - } - // remove last comma - bodyBuilder.deleteCharAt(bodyBuilder.length() - 1); - bodyBuilder.append("]}"); - - request.setJsonEntity(bodyBuilder.toString()); - var response = client().performRequest(request); - assertOkWithErrorMessage(response); - return entityAsMap(response); - } - - @SuppressWarnings("unchecked") - protected void assertNonEmptyInferenceResults(Map resultMap, int expectedNumberOfResults, TaskType taskType) { - if (taskType == TaskType.SPARSE_EMBEDDING) { - var results = (List>) 
resultMap.get(TaskType.SPARSE_EMBEDDING.toString()); - assertThat(results, hasSize(expectedNumberOfResults)); - } else { - fail("test with task type [" + taskType + "] are not supported yet"); - } - } - - protected static void assertOkWithErrorMessage(Response response) throws IOException { - int statusCode = response.getStatusLine().getStatusCode(); - if (statusCode == 200 || statusCode == 201) { - return; - } - - String responseStr = EntityUtils.toString(response.getEntity()); - assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); - } } diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java index 0804685aa2cb0..eee6f68c20ff7 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java @@ -87,7 +87,7 @@ public TestServiceModel parseRequestConfig( @Override @SuppressWarnings("unchecked") - public TestServiceModel parsePersistedConfig( + public TestServiceModel parsePersistedConfigWithSecrets( String modelId, TaskType taskType, Map config, @@ -105,6 +105,19 @@ public TestServiceModel parsePersistedConfig( return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); } + @Override + @SuppressWarnings("unchecked") + public Model parsePersistedConfig(String modelId, TaskType taskType, Map config) { + var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); + + var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); + + var taskSettingsMap = getTaskSettingsMap(config); + var taskSettings = 
TestTaskSettings.fromMap(taskSettingsMap); + + return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, null); + } + @Override public void infer( Model model, diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index 3c0db8228409e..50647ca328b23 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -8,11 +8,15 @@ package org.elasticsearch.xpack.inference.integration; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; @@ -21,7 +25,6 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.InferencePlugin; -import org.elasticsearch.xpack.inference.UnparsedModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeModel; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; @@ -31,13 +34,21 @@ import org.junit.Before; import java.io.IOException; 
+import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.stream.Collectors; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; @@ -59,7 +70,7 @@ protected Collection> getPlugins() { public void testStoreModel() throws Exception { String modelId = "test-store-model"; - Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); AtomicReference storeModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -90,7 +101,7 @@ public void testStoreModelWithUnknownFields() throws Exception { public void testGetModel() throws Exception { String modelId = "test-get-model"; - Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -98,27 +109,26 @@ public void testGetModel() throws Exception { assertThat(putModelHolder.get(), is(true)); // now get the model - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getUnparsedModelMap(modelId, listener), modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(modelId, listener), 
modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), is(nullValue())); assertThat(modelHolder.get(), not(nullValue())); - UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config(), modelHolder.get().secrets()); - assertEquals(model.getConfigurations().getService(), unparsedModel.service()); + assertEquals(model.getConfigurations().getService(), modelHolder.get().service()); var elserService = new ElserMlNodeService(new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class))); - ElserMlNodeModel roundTripModel = elserService.parsePersistedConfig( - unparsedModel.modelId(), - unparsedModel.taskType(), - unparsedModel.settings(), - unparsedModel.secrets() + ElserMlNodeModel roundTripModel = elserService.parsePersistedConfigWithSecrets( + modelHolder.get().modelId(), + modelHolder.get().taskType(), + modelHolder.get().settings(), + modelHolder.get().secrets() ); assertEquals(model, roundTripModel); } public void testStoreModelFailsWhenModelExists() throws Exception { String modelId = "test-put-trained-model-config-exists"; - Model model = buildModelConfig(modelId, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + Model model = buildElserModelConfig(modelId, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -140,7 +150,7 @@ public void testStoreModelFailsWhenModelExists() throws Exception { public void testDeleteModel() throws Exception { // put models for (var id : new String[] { "model1", "model2", "model3" }) { - Model model = buildModelConfig(id, ElserMlNodeService.NAME, TaskType.SPARSE_EMBEDDING); + Model model = buildElserModelConfig(id, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); @@ -155,19 
+165,115 @@ public void testDeleteModel() throws Exception { // get should fail deleteResponseHolder.set(false); - AtomicReference modelHolder = new AtomicReference<>(); - blockingCall(listener -> modelRegistry.getUnparsedModelMap("model1", listener), modelHolder, exceptionHolder); + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets("model1", listener), modelHolder, exceptionHolder); assertThat(exceptionHolder.get(), not(nullValue())); assertFalse(deleteResponseHolder.get()); assertThat(exceptionHolder.get().getMessage(), containsString("Model not found [model1]")); } - private Model buildModelConfig(String modelId, String service, TaskType taskType) { - return switch (service) { - case ElserMlNodeService.NAME -> ElserMlNodeServiceTests.randomModelConfig(modelId, taskType); - default -> throw new IllegalArgumentException("unknown service " + service); - }; + public void testGetModelsByTaskType() throws InterruptedException { + var service = "foo"; + var sparseAndTextEmbeddingModels = new ArrayList(); + sparseAndTextEmbeddingModels.add(createModel(randomAlphaOfLength(5), TaskType.SPARSE_EMBEDDING, service)); + sparseAndTextEmbeddingModels.add(createModel(randomAlphaOfLength(5), TaskType.SPARSE_EMBEDDING, service)); + sparseAndTextEmbeddingModels.add(createModel(randomAlphaOfLength(5), TaskType.SPARSE_EMBEDDING, service)); + sparseAndTextEmbeddingModels.add(createModel(randomAlphaOfLength(5), TaskType.TEXT_EMBEDDING, service)); + sparseAndTextEmbeddingModels.add(createModel(randomAlphaOfLength(5), TaskType.TEXT_EMBEDDING, service)); + + for (var model : sparseAndTextEmbeddingModels) { + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + } + + AtomicReference exceptionHolder = new 
AtomicReference<>(); + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.SPARSE_EMBEDDING, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(3)); + var sparseIds = sparseAndTextEmbeddingModels.stream() + .filter(m -> m.getConfigurations().getTaskType() == TaskType.SPARSE_EMBEDDING) + .map(Model::getModelId) + .collect(Collectors.toSet()); + modelHolder.get().forEach(m -> { + assertTrue(sparseIds.contains(m.modelId())); + assertThat(m.secrets().keySet(), empty()); + }); + + blockingCall(listener -> modelRegistry.getModelsByTaskType(TaskType.TEXT_EMBEDDING, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(2)); + var denseIds = sparseAndTextEmbeddingModels.stream() + .filter(m -> m.getConfigurations().getTaskType() == TaskType.TEXT_EMBEDDING) + .map(Model::getModelId) + .collect(Collectors.toSet()); + modelHolder.get().forEach(m -> { + assertTrue(denseIds.contains(m.modelId())); + assertThat(m.secrets().keySet(), empty()); + }); + } + + public void testGetAllModels() throws InterruptedException { + var service = "foo"; + var createdModels = new ArrayList(); + int modelCount = randomIntBetween(30, 100); + + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + for (int i = 0; i < modelCount; i++) { + var model = createModel(randomAlphaOfLength(5), randomFrom(TaskType.values()), service); + createdModels.add(model); + + blockingCall(listener -> modelRegistry.storeModel(model, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertNull(exceptionHolder.get()); + } + + AtomicReference> modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getAllModels(listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get(), hasSize(modelCount)); + var getAllModels = modelHolder.get(); + + // sort 
in the same order as the returned models + createdModels.sort(Comparator.comparing(Model::getModelId)); + for (int i = 0; i < modelCount; i++) { + assertEquals(createdModels.get(i).getModelId(), getAllModels.get(i).modelId()); + assertEquals(createdModels.get(i).getTaskType(), getAllModels.get(i).taskType()); + assertEquals(createdModels.get(i).getConfigurations().getService(), getAllModels.get(i).service()); + assertThat(getAllModels.get(i).secrets().keySet(), empty()); + } + } + + @SuppressWarnings("unchecked") + public void testGetModelWithSecrets() throws InterruptedException { + var service = "foo"; + var modelId = "model-with-secrets"; + var secret = "abc"; + + AtomicReference putModelHolder = new AtomicReference<>(); + AtomicReference exceptionHolder = new AtomicReference<>(); + + var modelWithSecrets = createModelWithSecrets(modelId, randomFrom(TaskType.values()), service, secret); + blockingCall(listener -> modelRegistry.storeModel(modelWithSecrets, listener), putModelHolder, exceptionHolder); + assertThat(putModelHolder.get(), is(true)); + assertNull(exceptionHolder.get()); + + AtomicReference modelHolder = new AtomicReference<>(); + blockingCall(listener -> modelRegistry.getModelWithSecrets(modelId, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get().secrets().keySet(), hasSize(1)); + var secretSettings = (Map) modelHolder.get().secrets().get("secret_settings"); + assertThat(secretSettings.get("secret"), equalTo(secret)); + + // get model without secrets + blockingCall(listener -> modelRegistry.getModel(modelId, listener), modelHolder, exceptionHolder); + assertThat(modelHolder.get().secrets().keySet(), empty()); + } + + private Model buildElserModelConfig(String modelId, TaskType taskType) { + return ElserMlNodeServiceTests.randomModelConfig(modelId, taskType); } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) @@ -197,6 +303,112 @@ private static Model 
buildModelWithUnknownField(String modelId) { ); } + public static Model createModel(String modelId, TaskType taskType, String service) { + return new Model(new ModelConfigurations(modelId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings())); + } + + public static Model createModelWithSecrets(String modelId, TaskType taskType, String service, String secret) { + return new Model( + new ModelConfigurations(modelId, taskType, service, new TestModelOfAnyKind.TestModelServiceSettings()), + new ModelSecrets(new TestModelOfAnyKind.TestSecretSettings(secret)) + ); + } + + private static class TestModelOfAnyKind extends ModelConfigurations { + + record TestModelServiceSettings() implements ServiceSettings { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return "test_service_settings"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + } + + record TestTaskSettings() implements TaskSettings { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return "test_task_settings"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + } + + record TestSecretSettings(String key) implements SecretSettings { + @Override + public String getWriteableName() { + return "test_secrets"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + public 
void writeTo(StreamOutput out) throws IOException { + out.writeString(key); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("secret", key); + builder.endObject(); + return builder; + } + } + + TestModelOfAnyKind(String modelId, TaskType taskType, String service) { + super(modelId, taskType, service, new TestModelServiceSettings(), new TestTaskSettings()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("unknown_field", "foo"); + builder.field(MODEL_ID, getModelId()); + builder.field(TaskType.NAME, getTaskType().toString()); + builder.field(SERVICE, getService()); + builder.field(SERVICE_SETTINGS, getServiceSettings()); + builder.field(TASK_SETTINGS, getTaskSettings()); + builder.endObject(); + return builder; + } + } + private static class ModelWithUnknownField extends ModelConfigurations { ModelWithUnknownField( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java index ddc70f4e8d846..52fc115d4a4a6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java @@ -9,19 +9,26 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; -import org.elasticsearch.xpack.inference.UnparsedModel; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import java.util.ArrayList; import java.util.List; +import java.util.concurrent.Executor; public class TransportGetInferenceModelAction extends HandledTransportAction< GetInferenceModelAction.Request, @@ -29,11 +36,13 @@ public class TransportGetInferenceModelAction extends HandledTransportAction< private final ModelRegistry modelRegistry; private final InferenceServiceRegistry serviceRegistry; + private final Executor executor; @Inject public TransportGetInferenceModelAction( TransportService transportService, ActionFilters actionFilters, + ThreadPool threadPool, ModelRegistry modelRegistry, InferenceServiceRegistry serviceRegistry ) { @@ -46,6 +55,7 @@ public TransportGetInferenceModelAction( ); this.modelRegistry = modelRegistry; this.serviceRegistry = serviceRegistry; + this.executor = threadPool.executor(InferencePlugin.UTILITY_THREAD_POOL_NAME); } @Override @@ -54,8 +64,19 @@ protected void doExecute( GetInferenceModelAction.Request request, ActionListener listener ) { - modelRegistry.getUnparsedModelMap(request.getModelId(), ActionListener.wrap(modelConfigMap -> { - var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets()); + boolean modelIdIsWildCard = Strings.isAllOrWildcard(request.getModelId()); + + if (request.getTaskType() == TaskType.ANY && 
modelIdIsWildCard) { + getAllModels(listener); + } else if (modelIdIsWildCard) { + getModelsByTaskType(request.getTaskType(), listener); + } else { + getSingleModel(request.getModelId(), request.getTaskType(), listener); + } + } + + private void getSingleModel(String modelId, TaskType requestedTaskType, ActionListener listener) { + modelRegistry.getModel(modelId, ActionListener.wrap(unparsedModel -> { var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { listener.onFailure( @@ -68,9 +89,56 @@ protected void doExecute( ); return; } - var model = service.get() - .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets()); + + if (requestedTaskType.isAnyOrSame(unparsedModel.taskType()) == false) { + listener.onFailure( + new ElasticsearchStatusException( + "Requested task type [{}] does not match the model's task type [{}]", + RestStatus.BAD_REQUEST, + requestedTaskType, + unparsedModel.taskType() + ) + ); + return; + } + + var model = service.get().parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()); listener.onResponse(new GetInferenceModelAction.Response(List.of(model.getConfigurations()))); }, listener::onFailure)); } + + private void getAllModels(ActionListener listener) { + modelRegistry.getAllModels( + ActionListener.wrap(models -> executor.execute(ActionRunnable.supply(listener, () -> parseModels(models))), listener::onFailure) + ); + } + + private void getModelsByTaskType(TaskType taskType, ActionListener listener) { + modelRegistry.getModelsByTaskType( + taskType, + ActionListener.wrap(models -> executor.execute(ActionRunnable.supply(listener, () -> parseModels(models))), listener::onFailure) + ); + } + + private GetInferenceModelAction.Response parseModels(List unparsedModels) { + var parsedModels = new ArrayList(); + + for (var unparsedModel : unparsedModels) { + var service = 
serviceRegistry.getService(unparsedModel.service()); + if (service.isEmpty()) { + throw new ElasticsearchStatusException( + "Unknown service [{}] for model [{}]. ", + RestStatus.INTERNAL_SERVER_ERROR, + unparsedModel.service(), + unparsedModel.modelId() + ); + } + parsedModels.add( + service.get() + .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings()) + .getConfigurations() + ); + } + return new GetInferenceModelAction.Response(parsedModels); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java index 095ffa2635234..7fb86763ad534 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.UnparsedModel; import org.elasticsearch.xpack.inference.registry.ModelRegistry; public class TransportInferenceAction extends HandledTransportAction { @@ -43,8 +42,7 @@ public TransportInferenceAction( @Override protected void doExecute(Task task, InferenceAction.Request request, ActionListener listener) { - ActionListener getModelListener = ActionListener.wrap(modelConfigMap -> { - var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets()); + ActionListener getModelListener = ActionListener.wrap(unparsedModel -> { var service = serviceRegistry.getService(unparsedModel.service()); if (service.isEmpty()) { listener.onFailure( @@ -72,11 +70,16 @@ protected void doExecute(Task task, InferenceAction.Request request, 
ActionListe } var model = service.get() - .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets()); + .parsePersistedConfigWithSecrets( + unparsedModel.modelId(), + unparsedModel.taskType(), + unparsedModel.settings(), + unparsedModel.secrets() + ); inferOnService(model, request, service.get(), listener); }, listener::onFailure); - modelRegistry.getUnparsedModelMap(request.getModelId(), getModelListener); + modelRegistry.getModelWithSecrets(request.getModelId(), getModelListener); } private void inferOnService( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 2937d4616571a..05c664f7ceeea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -30,8 +30,11 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -39,9 +42,12 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.inference.InferenceIndex; import org.elasticsearch.xpack.inference.InferenceSecretsIndex; +import org.elasticsearch.xpack.inference.services.MapParsingUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import 
java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; @@ -51,14 +57,47 @@ public class ModelRegistry { public record ModelConfigMap(Map config, Map secrets) {} + /** + * Semi parsed model where model id, task type and service + * are known but the settings are not parsed. + */ + public record UnparsedModel( + String modelId, + TaskType taskType, + String service, + Map settings, + Map secrets + ) { + + public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) { + if (modelConfigMap.config() == null) { + throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); + } + String modelId = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String service = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + TaskType taskType = TaskType.fromString(taskTypeStr); + + return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); + } + } + + private static final String TASK_TYPE_FIELD = "task_type"; + private static final String MODEL_ID_FIELD = "model_id"; private static final Logger logger = LogManager.getLogger(ModelRegistry.class); + private final OriginSettingClient client; public ModelRegistry(Client client) { this.client = new OriginSettingClient(client, ClientHelper.INFERENCE_ORIGIN); } - public void getUnparsedModelMap(String modelId, ActionListener listener) { + /** + * Get a model with its secret settings + * @param modelId Model to get + * @param listener Model listener + */ + public void getModelWithSecrets(String modelId, ActionListener listener) { ActionListener searchListener = ActionListener.wrap(searchResponse -> { // There should be a hit for the configurations and secrets if (searchResponse.getHits().getHits().length == 0) { @@ -67,7 
+106,7 @@ public void getUnparsedModelMap(String modelId, ActionListener l } var hits = searchResponse.getHits().getHits(); - listener.onResponse(createModelConfigMap(hits, modelId)); + listener.onResponse(UnparsedModel.unparsedModelFromMap(createModelConfigMap(hits, modelId))); }, listener::onFailure); @@ -80,6 +119,111 @@ public void getUnparsedModelMap(String modelId, ActionListener l client.search(modelSearch, searchListener); } + /** + * Get a model. + * Secret settings are not included + * @param modelId Model to get + * @param listener Model listener + */ + public void getModel(String modelId, ActionListener listener) { + ActionListener searchListener = ActionListener.wrap(searchResponse -> { + // There should be a hit for the configurations and secrets + if (searchResponse.getHits().getHits().length == 0) { + listener.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId)); + return; + } + + var hits = searchResponse.getHits().getHits(); + var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + assert modelConfigs.size() == 1; + listener.onResponse(modelConfigs.get(0)); + + }, listener::onFailure); + + QueryBuilder queryBuilder = documentIdQuery(modelId); + SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN) + .setQuery(queryBuilder) + .setSize(1) + .setTrackTotalHits(false) + .request(); + + client.search(modelSearch, searchListener); + } + + /** + * Get all models of a particular task type. 
+ * Secret settings are not included + * @param taskType The task type + * @param listener Models listener + */ + public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { + ActionListener searchListener = ActionListener.wrap(searchResponse -> { + // Not an error if no models of this task_type + if (searchResponse.getHits().getHits().length == 0) { + listener.onResponse(List.of()); + return; + } + + var hits = searchResponse.getHits().getHits(); + var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + listener.onResponse(modelConfigs); + + }, listener::onFailure); + + QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(TASK_TYPE_FIELD, taskType.toString())); + + SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN) + .setQuery(queryBuilder) + .setSize(10_000) + .setTrackTotalHits(false) + .addSort(MODEL_ID_FIELD, SortOrder.ASC) + .request(); + + client.search(modelSearch, searchListener); + } + + /** + * Get all models. + * Secret settings are not included + * @param listener Models listener + */ + public void getAllModels(ActionListener> listener) { + ActionListener searchListener = ActionListener.wrap(searchResponse -> { + // Not an error if no models of this task_type + if (searchResponse.getHits().getHits().length == 0) { + listener.onResponse(List.of()); + return; + } + + var hits = searchResponse.getHits().getHits(); + var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + listener.onResponse(modelConfigs); + + }, listener::onFailure); + + // In theory the index should only contain model config documents + // and a match all query would be sufficient. 
But just in case the + // index has been polluted return only docs with a task_type field + QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.existsQuery(TASK_TYPE_FIELD)); + + SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN) + .setQuery(queryBuilder) + .setSize(10_000) + .setTrackTotalHits(false) + .addSort(MODEL_ID_FIELD, SortOrder.ASC) + .request(); + + client.search(modelSearch, searchListener); + } + + private List parseHitsAsModels(SearchHit[] hits) { + var modelConfigs = new ArrayList(); + for (var hit : hits) { + modelConfigs.add(new ModelConfigMap(hit.getSourceAsMap(), Map.of())); + } + return modelConfigs; + } + private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { Map mappedHits = Arrays.stream(hits).collect(Collectors.toMap(hit -> { if (hit.getIndex().startsWith(InferenceIndex.INDEX_NAME)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 98f0c1c1aeeb1..ce291bcf006ae 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -8,12 +8,12 @@ package org.elasticsearch.xpack.inference.rest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; -import java.io.IOException; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -26,13 +26,21 @@ public String getName() { @Override public List routes() { - 
return List.of(new Route(GET, "_inference/{task_type}/{model_id}")); + return List.of(new Route(GET, "_inference/{task_type}/{model_id}"), new Route(GET, "_inference/_all")); } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String taskType = restRequest.param("task_type"); - String modelId = restRequest.param("model_id"); + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String modelId = null; + TaskType taskType = null; + if (restRequest.hasParam("task_type") == false && restRequest.hasParam("model_id") == false) { + // _all models request + modelId = "_all"; + taskType = TaskType.ANY; + } else { + taskType = TaskType.fromStringOrStatusException(restRequest.param("task_type")); + modelId = restRequest.param("model_id"); + } var request = new GetInferenceModelAction.Request(modelId, taskType); return channel -> client.execute(GetInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java index 20bea7f1347b3..45bbddc92f135 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java @@ -61,9 +61,12 @@ public static Map removeFromMapOrThrowIfNull(Map return value; } - @SuppressWarnings("unchecked") - public static Map removeFromMap(Map sourceMap, String fieldName) { - return (Map) sourceMap.remove(fieldName); + public static String removeStringOrThrowIfNull(Map sourceMap, String key) { + String value = removeAsType(sourceMap, key, String.class); + if (value == null) { + throw new ElasticsearchStatusException("Missing required field [{}]", 
RestStatus.BAD_REQUEST, key); + } + return value; } public static void throwIfNotEmptyMap(Map settingsMap, String serviceName) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index f9cc74550469c..048920356aca0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -100,12 +100,17 @@ public ElserMlNodeModel parseRequestConfig( } @Override - public ElserMlNodeModel parsePersistedConfig( + public ElserMlNodeModel parsePersistedConfigWithSecrets( String modelId, TaskType taskType, Map config, Map secrets ) { + return parsePersistedConfig(modelId, taskType, config); + } + + @Override + public ElserMlNodeModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 3aaa122e93fe9..8c978112c4ec3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -72,7 +72,7 @@ public HuggingFaceElserModel parseRequestConfig( } @Override - public HuggingFaceElserModel parsePersistedConfig( + public HuggingFaceElserModel 
parsePersistedConfigWithSecrets( String modelId, TaskType taskType, Map config, @@ -87,6 +87,14 @@ public HuggingFaceElserModel parsePersistedConfig( return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); } + @Override + public HuggingFaceElserModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); + + return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, null); + } + @Override public void infer(Model model, List input, Map taskSettings, ActionListener listener) { if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 1d2d123432ab8..0a7ae147d13d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -95,7 +95,12 @@ private OpenAiModel createModel( } @Override - public OpenAiModel parsePersistedConfig(String modelId, TaskType taskType, Map config, Map secrets) { + public OpenAiModel parsePersistedConfigWithSecrets( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); @@ -118,6 +123,27 @@ public OpenAiModel parsePersistedConfig(String modelId, TaskType 
taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + + OpenAiModel model = createModel( + modelId, + taskType, + serviceSettingsMap, + taskSettingsMap, + null, + format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, NAME) + ); + + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); + + return model; + } + @Override public void infer(Model model, List input, Map taskSettings, ActionListener listener) { init(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java index dd422753faddb..091c11a480c0d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/GetInferenceModelRequestTests.java @@ -15,7 +15,7 @@ public class GetInferenceModelRequestTests extends AbstractWireSerializingTestCase { public static GetInferenceModelAction.Request randomTestInstance() { - return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values()).toString()); + return new GetInferenceModelAction.Request(randomAlphaOfLength(8), randomFrom(TaskType.values())); } @Override @@ -31,10 +31,10 @@ protected GetInferenceModelAction.Request createTestInstance() { @Override protected GetInferenceModelAction.Request mutateInstance(GetInferenceModelAction.Request instance) { return switch (randomIntBetween(0, 1)) { - case 0 -> new GetInferenceModelAction.Request(instance.getModelId() + "foo", instance.getTaskType().toString()); + case 0 -> 
new GetInferenceModelAction.Request(instance.getModelId() + "foo", instance.getTaskType()); case 1 -> { var nextTaskType = TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length]; - yield new GetInferenceModelAction.Request(instance.getModelId(), nextTaskType.toString()); + yield new GetInferenceModelAction.Request(instance.getModelId(), nextTaskType); } default -> throw new UnsupportedOperationException(); }; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java index 3b3134fe3d92e..b7d491bf54ddc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java @@ -18,8 +18,11 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; @@ -29,12 +32,14 @@ import org.junit.After; import org.junit.Before; +import java.nio.ByteBuffer; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static 
org.mockito.Mockito.mock; @@ -62,8 +67,8 @@ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); - registry.getUnparsedModelMap("1", listener); + var listener = new PlainActionFuture(); + registry.getModelWithSecrets("1", listener); ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT)); assertThat(exception.getMessage(), is("Model not found [1]")); @@ -76,8 +81,8 @@ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIn var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); - registry.getUnparsedModelMap("1", listener); + var listener = new PlainActionFuture(); + registry.getModelWithSecrets("1", listener); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -93,8 +98,8 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); - registry.getUnparsedModelMap("1", listener); + var listener = new PlainActionFuture(); + registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -110,8 +115,8 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); - registry.getUnparsedModelMap("1", listener); + var listener = new PlainActionFuture(); + registry.getModelWithSecrets("1", listener); IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -120,21 +125,69 @@ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFind ); } - public void 
testGetUnparsedModelMap_ReturnsModelConfigMap_WhenBothInferenceAndSecretsHitsAreFound() { + public void testGetModelWithSecrets() { var client = mockClient(); + String config = """ + { + "model_id": "1", + "task_type": "sparse_embedding", + "service": "foo" + } + """; + String secrets = """ + { + "api_key": "secret" + } + """; + var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".secrets-inference")); + inferenceSecretsHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(secrets)))); mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit })); var registry = new ModelRegistry(client); - var listener = new PlainActionFuture(); - registry.getUnparsedModelMap("1", listener); + var listener = new PlainActionFuture(); + registry.getModelWithSecrets("1", listener); + + var modelConfig = listener.actionGet(TIMEOUT); + assertEquals("1", modelConfig.modelId()); + assertEquals("foo", modelConfig.service()); + assertEquals(TaskType.SPARSE_EMBEDDING, modelConfig.taskType()); + assertThat(modelConfig.settings().keySet(), empty()); + assertThat(modelConfig.secrets().keySet(), hasSize(1)); + assertEquals("secret", modelConfig.secrets().get("api_key")); + } + + public void testGetModelNoSecrets() { + var client = mockClient(); + String config = """ + { + "model_id": "1", + "task_type": "sparse_embedding", + "service": "foo" + } + """; + + var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference")); + inferenceHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(Strings.toUTF8Bytes(config)))); + + mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit })); + + var registry = new ModelRegistry(client); + + var listener = new PlainActionFuture(); + registry.getModel("1", 
listener); + registry.getModel("1", listener); var modelConfig = listener.actionGet(TIMEOUT); - assertThat(modelConfig.config(), nullValue()); - assertThat(modelConfig.secrets(), nullValue()); + assertEquals("1", modelConfig.modelId()); + assertEquals("foo", modelConfig.service()); + assertEquals(TaskType.SPARSE_EMBEDDING, modelConfig.taskType()); + assertThat(modelConfig.settings().keySet(), empty()); + assertThat(modelConfig.secrets().keySet(), empty()); } public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java index 4cce176e78c12..f8480709a3e40 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java @@ -127,7 +127,12 @@ public void testParseConfigStrictWithUnknownSettings() { containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") ); } else { - var parsed = service.parsePersistedConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Collections.emptyMap()); + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } @@ -158,7 +163,12 @@ public void testParseConfigStrictWithUnknownSettings() { containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") ); } else { - var parsed = service.parsePersistedConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Collections.emptyMap()); + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } @@ -190,7 +200,12 @@ public void 
testParseConfigStrictWithUnknownSettings() { containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") ); } else { - var parsed = service.parsePersistedConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Collections.emptyMap()); + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 9cd7a4b4eed2c..0d57e90dcd31b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -273,7 +273,12 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOE getSecretSettingsMap("secret") ); - var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()); + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); @@ -301,7 +306,12 @@ public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() thro var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.SPARSE_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( @@ -324,7 +334,12 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUr getSecretSettingsMap("secret") ); - 
var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()); + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); @@ -353,7 +368,12 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( @@ -381,7 +401,12 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecretsSettings var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( @@ -407,7 +432,12 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecrets() throw var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( @@ -435,7 +465,12 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInServiceSettings var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () 
-> service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( @@ -463,7 +498,12 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInTaskSettings() var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config(), persistedConfig.secrets()) + () -> service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ) ); assertThat( From d68b6a399655c3836d4df54507e73243823e1bf1 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Fri, 1 Dec 2023 12:14:33 +0100 Subject: [PATCH 130/263] [Connector API] Implement update filtering action (#102809) --- .../api/connector.update_filtering.json | 39 +++ .../331_connector_update_scheduling.yml | 2 +- .../332_connector_update_filtering.yml | 278 ++++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 9 +- .../connector/ConnectorIndexService.java | 31 ++ .../RestUpdateConnectorFilteringAction.java | 45 +++ ...ansportUpdateConnectorFilteringAction.java | 55 ++++ .../UpdateConnectorFilteringAction.java | 179 +++++++++++ .../UpdateConnectorSchedulingAction.java | 10 +- .../connector/ConnectorIndexServiceTests.java | 51 ++++ ...eringActionRequestBWCSerializingTests.java | 55 ++++ ...ringActionResponseBWCSerializingTests.java | 42 +++ .../xpack/security/operator/Constants.java | 1 + 13 files changed, 793 insertions(+), 4 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json new file mode 100644 index 0000000000000..6923dc88006e3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json @@ -0,0 +1,39 @@ +{ + "connector.update_filtering": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the filtering field in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_filtering", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "A list of connector filtering configurations.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml index 21d588f538fc5..191ccbb6ba4bc 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml @@ -45,7 +45,7 @@ setup: - match: { scheduling.incremental.interval: "3 0 0 * * ?" } --- -"Update Connector Scheduling - 404 status code returned when connector doesn't exist": +"Update Connector Scheduling - Connector doesn't exist": - do: catch: "missing" connector.update_scheduling: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml new file mode 100644 index 0000000000000..aa735081f55e3 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml @@ -0,0 +1,278 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector +--- +"Update Connector Filtering": + - do: + connector.update_filtering: + connector_id: test-connector + body: + filtering: + - active: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: 
"2023-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: DEFAULT + draft: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + - active: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-1 + order: 0 + policy: include + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: TEST + draft: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-1 + order: 0 + policy: exclude + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { filtering.0.domain: DEFAULT } + - match: { filtering.0.active.advanced_snippet.created_at: "2023-05-25T12:30:00.000Z" } + - match: { filtering.0.active.rules.0.id: "RULE-ACTIVE-0" } + - match: { filtering.0.draft.rules.0.id: "RULE-DRAFT-0" } + + - match: { filtering.1.domain: TEST } + - match: { filtering.1.active.advanced_snippet.created_at: "2021-05-25T12:30:00.000Z" } + - match: { filtering.1.active.rules.0.id: "RULE-ACTIVE-1" } + - match: { filtering.1.draft.rules.0.id: "RULE-DRAFT-1" } + +--- +"Update Connector Filtering - 404 status code returned when connector doesn't exist": + - do: + catch: "missing" + 
connector.update_filtering: + connector_id: test-non-existent-connector + body: + filtering: + - active: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: DEFAULT + draft: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + - active: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-1 + order: 0 + policy: include + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: TEST + draft: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-1 + order: 0 + policy: exclude + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + +--- +"Update Connector Filtering - 400 status code returned when required fields are missing": + - do: + catch: "bad_request" + connector.update_filtering: + connector_id: test-connector + body: + filtering: + - domain: some_domain + + - match: + status: 400 + +--- +"Update Connector Filtering - 400 status code returned with wrong datetime expression": + - do: + catch: "bad_request" + connector.update_filtering: + connector_id: test-connector + body: + filtering: + - active: + 
advanced_snippet: + created_at: "this-is-not-a-datetime-!!!!" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: DEFAULT + draft: + advanced_snippet: + created_at: "2023-05-25T12:30:00.000Z" + updated_at: "2023-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2023-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-0 + order: 0 + policy: include + rule: regex + updated_at: "2023-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + - active: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-ACTIVE-1 + order: 0 + policy: include + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid + domain: TEST + draft: + advanced_snippet: + created_at: "2021-05-25T12:30:00.000Z" + updated_at: "2021-05-25T12:30:00.000Z" + value: {} + rules: + - created_at: "2021-05-25T12:30:00.000Z" + field: _ + id: RULE-DRAFT-1 + order: 0 + policy: exclude + rule: regex + updated_at: "2021-05-25T12:30:00.000Z" + value: ".*" + validation: + errors: [] + state: valid diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 26ac6dc9b939d..7d1d2f2204910 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,12 +50,15 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import 
org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; @@ -178,11 +181,12 @@ protected XPackLicenseState getLicenseState() { if (ConnectorAPIFeature.isEnabled()) { actionHandlers.addAll( List.of( - // Connector API + // Connectors API new ActionHandler<>(DeleteConnectorAction.INSTANCE, TransportDeleteConnectorAction.class), new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new 
ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), // SyncJob API @@ -238,11 +242,12 @@ public List getRestHandlers( if (ConnectorAPIFeature.isEnabled()) { restHandlers.addAll( List.of( - // Connector API + // Connectors API new RestDeleteConnectorAction(), new RestGetConnectorAction(), new RestListConnectorAction(), new RestPutConnectorAction(), + new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorSchedulingAction(), // SyncJob API diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index d632a28d3f858..6ca575580e9fd 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import java.util.Arrays; @@ -169,6 +170,36 @@ public void onFailure(Exception e) { } } + /** + * Updates the {@link ConnectorFiltering} property of a {@link Connector}. + * + * @param request Request for updating connector filtering property. + * @param listener Listener to respond to a successful response or an error. 
+ */ + public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the {@link ConnectorScheduling} property of a {@link Connector}. * diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java new file mode 100644 index 0000000000000..63ae3e81fe563 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorFilteringAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_filtering_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_filtering")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorFilteringAction.Request request = UpdateConnectorFilteringAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorFilteringAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorFilteringAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java new file mode 100644 index 0000000000000..e871eb4bb79e5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorFilteringAction extends HandledTransportAction< + UpdateConnectorFilteringAction.Request, + UpdateConnectorFilteringAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorFilteringAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorFilteringAction.NAME, + transportService, + actionFilters, + UpdateConnectorFilteringAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorFilteringAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorFiltering( + request, + listener.map(r -> new UpdateConnectorFilteringAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java new file mode 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.application.connector.action;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.application.connector.Connector;
import org.elasticsearch.xpack.application.connector.ConnectorFiltering;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Action that replaces the {@code filtering} configuration of a connector document.
 */
public class UpdateConnectorFilteringAction extends ActionType<UpdateConnectorFilteringAction.Response> {

    public static final UpdateConnectorFilteringAction INSTANCE = new UpdateConnectorFilteringAction();
    public static final String NAME = "cluster:admin/xpack/connector/update_filtering";

    public UpdateConnectorFilteringAction() {
        super(NAME, UpdateConnectorFilteringAction.Response::new);
    }

    /**
     * Request carrying the target connector id and the new list of filtering configurations.
     * The connector id travels in the URL path, not the XContent body, which is why the
     * parser receives it as external context.
     */
    public static class Request extends ActionRequest implements ToXContentObject {

        private final String connectorId;
        private final List<ConnectorFiltering> filtering;

        public Request(String connectorId, List<ConnectorFiltering> filtering) {
            this.connectorId = connectorId;
            this.filtering = filtering;
        }

        public Request(StreamInput in) throws IOException {
            super(in);
            this.connectorId = in.readString();
            this.filtering = in.readOptionalCollectionAsList(ConnectorFiltering::new);
        }

        public String getConnectorId() {
            return connectorId;
        }

        public List<ConnectorFiltering> getFiltering() {
            return filtering;
        }

        @Override
        public ActionRequestValidationException validate() {
            ActionRequestValidationException validationException = null;

            // FIX: this previously returned null unconditionally. Validate the connector id the
            // same way UpdateConnectorSchedulingAction.Request does, so a request without a
            // target connector is rejected before hitting the index service.
            if (Strings.isNullOrEmpty(connectorId)) {
                validationException = addValidationError("[connector_id] cannot be null or empty.", validationException);
            }

            return validationException;
        }

        @SuppressWarnings("unchecked")
        private static final ConstructingObjectParser<UpdateConnectorFilteringAction.Request, String> PARSER =
            new ConstructingObjectParser<>(
                "connector_update_filtering_request",
                false,
                ((args, connectorId) -> new UpdateConnectorFilteringAction.Request(connectorId, (List<ConnectorFiltering>) args[0]))
            );

        static {
            PARSER.declareObjectArray(constructorArg(), (p, c) -> ConnectorFiltering.fromXContent(p), Connector.FILTERING_FIELD);
        }

        /**
         * Parses a request body, attaching the connector id taken from the REST path.
         *
         * @throws ElasticsearchParseException if the body is not valid XContent
         */
        public static UpdateConnectorFilteringAction.Request fromXContentBytes(
            String connectorId,
            BytesReference source,
            XContentType xContentType
        ) {
            try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) {
                return UpdateConnectorFilteringAction.Request.fromXContent(parser, connectorId);
            } catch (IOException e) {
                throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e);
            }
        }

        public static UpdateConnectorFilteringAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException {
            return PARSER.parse(parser, connectorId);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            {
                builder.field(Connector.FILTERING_FIELD.getPreferredName(), filtering);
            }
            builder.endObject();
            return builder;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(connectorId);
            out.writeOptionalCollection(filtering);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Request request = (Request) o;
            return Objects.equals(connectorId, request.connectorId) && Objects.equals(filtering, request.filtering);
        }

        @Override
        public int hashCode() {
            return Objects.hash(connectorId, filtering);
        }
    }

    /**
     * Response wrapping the index write result of the update.
     */
    public static class Response extends ActionResponse implements ToXContentObject {

        final DocWriteResponse.Result result;

        public Response(StreamInput in) throws IOException {
            super(in);
            result = DocWriteResponse.Result.readFrom(in);
        }

        public Response(DocWriteResponse.Result result) {
            this.result = result;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            this.result.writeTo(out);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field("result", this.result.getLowercase());
            builder.endObject();
            return builder;
        }

        /** Maps a NOT_FOUND write result to HTTP 404; everything else is 200. */
        public RestStatus status() {
            return switch (result) {
                case NOT_FOUND -> RestStatus.NOT_FOUND;
                default -> RestStatus.OK;
            };
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Response that = (Response) o;
            return Objects.equals(result, that.result);
        }

        @Override
        public int hashCode() {
            return Objects.hash(result);
        }
    }
}
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java index eb0e265c44f28..dfca87b8324cb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -30,6 +31,7 @@ import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class UpdateConnectorSchedulingAction extends ActionType { @@ -67,7 +69,13 @@ public ConnectorScheduling getScheduling() { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; } private static final ConstructingObjectParser PARSER = diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 5d0d539262f10..854e80ad1bf2d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -22,6 +23,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; @@ -61,6 +64,29 @@ public void testDeleteConnector() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); } + public void testUpdateConnectorFiltering() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + List filteringList = IntStream.range(0, 10) + .mapToObj((i) -> ConnectorTestUtils.getRandomConnectorFiltering()) + .collect(Collectors.toList()); + + UpdateConnectorFilteringAction.Request updateFilteringRequest = new UpdateConnectorFilteringAction.Request( + connector.getConnectorId(), + filteringList + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorFiltering(updateFilteringRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + + assertThat(filteringList, equalTo(indexedConnector.getFiltering())); + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = 
ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -180,6 +206,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorFiltering(UpdateConnectorFilteringAction.Request updateFiltering) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorFiltering(updateFiltering, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update filtering request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update filtering request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorScheduling(UpdateConnectorSchedulingAction.Request updatedScheduling) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..1d433d58be6ad --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionRequestBWCSerializingTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.application.connector.action;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.application.connector.ConnectorTestUtils;
import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase;

import java.io.IOException;
import java.util.List;

/**
 * Backwards-compatibility serialization round-trip tests for
 * {@link UpdateConnectorFilteringAction.Request}.
 */
public class UpdateConnectorFilteringActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase<
    UpdateConnectorFilteringAction.Request> {

    // Captured in createTestInstance so doParseInstance can rebuild an equal request:
    // the connector id is not part of the XContent body, only of the REST path.
    private String connectorId;

    @Override
    protected Writeable.Reader<UpdateConnectorFilteringAction.Request> instanceReader() {
        return UpdateConnectorFilteringAction.Request::new;
    }

    @Override
    protected UpdateConnectorFilteringAction.Request createTestInstance() {
        this.connectorId = randomUUID();
        var randomFiltering = List.of(
            ConnectorTestUtils.getRandomConnectorFiltering(),
            ConnectorTestUtils.getRandomConnectorFiltering()
        );
        return new UpdateConnectorFilteringAction.Request(connectorId, randomFiltering);
    }

    @Override
    protected UpdateConnectorFilteringAction.Request mutateInstance(UpdateConnectorFilteringAction.Request instance) throws IOException {
        // Any freshly generated instance differs with overwhelming probability; retry until it does.
        return randomValueOtherThan(instance, this::createTestInstance);
    }

    @Override
    protected UpdateConnectorFilteringAction.Request doParseInstance(XContentParser parser) throws IOException {
        return UpdateConnectorFilteringAction.Request.fromXContent(parser, this.connectorId);
    }

    @Override
    protected UpdateConnectorFilteringAction.Request mutateInstanceForVersion(
        UpdateConnectorFilteringAction.Request instance,
        TransportVersion version
    ) {
        // The wire format is identical across supported versions.
        return instance;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.application.connector.action;

import org.elasticsearch.TransportVersion;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase;

import java.io.IOException;

/**
 * Backwards-compatibility wire serialization tests for
 * {@link UpdateConnectorFilteringAction.Response}.
 */
public class UpdateConnectorFilteringActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase<
    UpdateConnectorFilteringAction.Response> {

    @Override
    protected Writeable.Reader<UpdateConnectorFilteringAction.Response> instanceReader() {
        return UpdateConnectorFilteringAction.Response::new;
    }

    @Override
    protected UpdateConnectorFilteringAction.Response createTestInstance() {
        // The response only carries the index write result, so a random result covers all states.
        var randomResult = randomFrom(DocWriteResponse.Result.values());
        return new UpdateConnectorFilteringAction.Response(randomResult);
    }

    @Override
    protected UpdateConnectorFilteringAction.Response mutateInstance(UpdateConnectorFilteringAction.Response instance) throws IOException {
        return randomValueOtherThan(instance, this::createTestInstance);
    }

    @Override
    protected UpdateConnectorFilteringAction.Response mutateInstanceForVersion(
        UpdateConnectorFilteringAction.Response instance,
        TransportVersion version
    ) {
        // The wire format is identical across supported versions.
        return instance;
    }
}
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 0164ee8f6122e..7863a50b764e7 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -129,6 +129,7 @@ public class Constants { "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", + "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From 53bc8cc2cb033132e6976ecdee7c0c78731d485a Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 1 Dec 2023 13:24:05 +0200 Subject: [PATCH 131/263] updating (#102853) Temporarily muting test - relates to https://github.com/elastic/elasticsearch/issues/102257 --- .../java/org/elasticsearch/search/SearchCancellationIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index c41984b468b11..82e9ad4dc6cde 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.search; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.MultiSearchResponse; @@ -50,6 
+51,7 @@ import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102257") public class SearchCancellationIT extends AbstractSearchCancellationTestCase { @Override From 892b2ff5819a0257a4156473a592f7c498f3cba5 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 1 Dec 2023 13:54:01 +0100 Subject: [PATCH 132/263] [Enterprise Search] Add check in sync job endpoint (#102842) Add check in Connector Sync Job Endpoint. --- .../api/connector_sync_job.check_in.json | 32 +++++ .../420_connector_sync_job_check_in.yml | 36 ++++++ .../xpack/application/EnterpriseSearch.java | 9 +- .../syncjob/ConnectorSyncJobConstants.java | 18 +++ .../syncjob/ConnectorSyncJobIndexService.java | 45 ++++++- .../action/CheckInConnectorSyncJobAction.java | 111 ++++++++++++++++++ .../action/DeleteConnectorSyncJobAction.java | 8 +- .../RestCheckInConnectorSyncJobAction.java | 46 ++++++++ .../RestDeleteConnectorSyncJobAction.java | 4 +- ...ransportCheckInConnectorSyncJobAction.java | 49 ++++++++ .../ConnectorSyncJobIndexServiceTests.java | 98 +++++++++++++++- .../syncjob/ConnectorSyncJobTestUtils.java | 5 + ...ncJobActionRequestBWCSerializingTests.java | 47 ++++++++ .../CheckInConnectorSyncJobActionTests.java | 36 ++++++ .../DeleteConnectorSyncJobActionTests.java | 3 +- ...ortCheckInConnectorSyncJobActionTests.java | 74 ++++++++++++ .../xpack/security/operator/Constants.java | 1 + 17 files changed, 607 insertions(+), 15 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobConstants.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json new file mode 100644 index 0000000000000..6c406a3a3d2c1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json @@ -0,0 +1,32 @@ +{ + "connector_sync_job.check_in": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Checks in a connector sync job (refreshes 'last_seen')." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_check_in", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be checked in" + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml new file mode 100644 index 0000000000000..9ef37f4a9fe60 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml @@ -0,0 +1,36 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Check in a Connector Sync Job": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: sync-job-id-to-check-in } + - do: + connector_sync_job.check_in: + connector_sync_job_id: $sync-job-id-to-check-in + + - match: { acknowledged: true } + + +--- +"Check in a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.check_in: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 7d1d2f2204910..0978e4cc08f21 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -60,10 +60,13 @@ import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; @@ -191,7 +194,8 @@ protected XPackLicenseState getLicenseState() { // SyncJob API new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), - new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class) + new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), + new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, 
TransportCheckInConnectorSyncJobAction.class) ) ); } @@ -252,7 +256,8 @@ public List getRestHandlers( // SyncJob API new RestPostConnectorSyncJobAction(), - new RestDeleteConnectorSyncJobAction() + new RestDeleteConnectorSyncJobAction(), + new RestCheckInConnectorSyncJobAction() ) ); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobConstants.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobConstants.java new file mode 100644 index 0000000000000..cf44ab4e733c8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobConstants.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob; + +import static org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction.Request.CONNECTOR_SYNC_JOB_ID_FIELD; + +public class ConnectorSyncJobConstants { + + public static final String EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE = + "[connector_sync_job_id] of the connector sync job cannot be null or empty."; + public static final String CONNECTOR_SYNC_JOB_ID_PARAM = CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(); + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 5deb63fd60669..e3276249a06b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -19,10 +19,13 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.UUIDs; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; @@ -177,7 +180,7 @@ public void deleteConnectorSyncJob(String connectorSyncJobId, ActionListener(connectorSyncJobId, listener, (l, deleteResponse) -> { + new 
DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, deleteResponse) -> { if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); return; @@ -190,16 +193,50 @@ public void deleteConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + Instant newLastSeen = Instant.now(); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ).doc(Map.of(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(), newLastSeen)); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Listeners that checks failures for IndexNotFoundException, and transforms them in ResourceNotFoundException, * invoking onFailure on the delegate listener */ - static class DelegatingIndexNotFoundActionListener extends DelegatingActionListener { + static class DelegatingIndexNotFoundOrDocumentMissingActionListener extends DelegatingActionListener { private final BiConsumer, T> bc; private final String connectorSyncJobId; - DelegatingIndexNotFoundActionListener(String connectorSyncJobId, ActionListener delegate, BiConsumer, T> bc) { + DelegatingIndexNotFoundOrDocumentMissingActionListener( + String connectorSyncJobId, + ActionListener delegate, + BiConsumer, T> bc + ) { super(delegate); this.bc = bc; this.connectorSyncJobId = connectorSyncJobId; @@ -213,7 +250,7 @@ public void onResponse(T t) { @Override public void onFailure(Exception e) { Throwable cause = ExceptionsHelper.unwrapCause(e); - if 
(cause instanceof IndexNotFoundException) { + if (cause instanceof IndexNotFoundException || cause instanceof DocumentMissingException) { delegate.onFailure(new ResourceNotFoundException("connector sync job [" + connectorSyncJobId + "] not found")); return; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java new file mode 100644 index 0000000000000..3e5e1578cd54d --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.application.connector.syncjob.action;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants;

import java.io.IOException;
import java.util.Objects;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

/**
 * Action that "checks in" a connector sync job, i.e. refreshes its {@code last_seen}
 * timestamp. Answers with a plain acknowledgement.
 */
public class CheckInConnectorSyncJobAction extends ActionType<AcknowledgedResponse> {

    public static final CheckInConnectorSyncJobAction INSTANCE = new CheckInConnectorSyncJobAction();
    public static final String NAME = "cluster:admin/xpack/connector/sync_job/check_in";

    private CheckInConnectorSyncJobAction() {
        super(NAME, AcknowledgedResponse::readFrom);
    }

    /**
     * Request identifying the sync job to check in by its id.
     */
    public static class Request extends ActionRequest implements ToXContentObject {

        public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id");

        private static final ConstructingObjectParser<Request, Void> PARSER = new ConstructingObjectParser<>(
            "check_in_connector_sync_job_request",
            false,
            (args) -> new Request((String) args[0])
        );

        static {
            PARSER.declareString(constructorArg(), CONNECTOR_SYNC_JOB_ID_FIELD);
        }

        private final String connectorSyncJobId;

        public Request(String connectorSyncJobId) {
            this.connectorSyncJobId = connectorSyncJobId;
        }

        public Request(StreamInput in) throws IOException {
            super(in);
            this.connectorSyncJobId = in.readString();
        }

        public static CheckInConnectorSyncJobAction.Request parse(XContentParser parser) {
            return PARSER.apply(parser, null);
        }

        public String getConnectorSyncJobId() {
            return connectorSyncJobId;
        }

        @Override
        public ActionRequestValidationException validate() {
            // Guard clause: the only invalid state is a missing/empty sync job id.
            if (Strings.isNullOrEmpty(connectorSyncJobId)) {
                return addValidationError(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, null);
            }
            return null;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(connectorSyncJobId);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            builder.field(CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(), connectorSyncJobId);
            builder.endObject();
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            return Objects.equals(connectorSyncJobId, ((Request) o).connectorSyncJobId);
        }

        @Override
        public int hashCode() {
            return Objects.hash(connectorSyncJobId);
        }
    }
}
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; import java.io.IOException; import java.util.Objects; @@ -36,8 +37,6 @@ private DeleteConnectorSyncJobAction() { } public static class Request extends ActionRequest implements ToXContentObject { - public static final String EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE = - "[connector_sync_job_id] of the connector sync job cannot be null or empty."; public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); private final String connectorSyncJobId; @@ -56,7 +55,10 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (Strings.isNullOrEmpty(connectorSyncJobId)) { - validationException = addValidationError(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, validationException); + validationException = addValidationError( + ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, + validationException + ); } return validationException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java new file mode 100644 index 0000000000000..86f97f4c5fdb4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestCheckInConnectorSyncJobAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_sync_job_check_in_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_check_in" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + CheckInConnectorSyncJobAction.Request request = new CheckInConnectorSyncJobAction.Request( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM) + ); + + return restChannel -> client.execute(CheckInConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java index 283675f89d1db..c1f352a341cc3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestDeleteConnectorSyncJobAction.java @@ -16,12 +16,10 @@ import java.io.IOException; import java.util.List; -import static org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction.Request.CONNECTOR_SYNC_JOB_ID_FIELD; +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; public class RestDeleteConnectorSyncJobAction extends BaseRestHandler { - private static final String CONNECTOR_SYNC_JOB_ID_PARAM = CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(); - @Override public String getName() { return "connector_sync_job_delete_action"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java new file mode 100644 index 0000000000000..ebaadc80f4c27 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportCheckInConnectorSyncJobAction extends HandledTransportAction< + CheckInConnectorSyncJobAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportCheckInConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + CheckInConnectorSyncJobAction.NAME, + transportService, + actionFilters, + CheckInConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute(Task task, CheckInConnectorSyncJobAction.Request request, ActionListener listener) { + connectorSyncJobIndexService.checkInConnectorSyncJob(request.getConnectorSyncJobId(), listener.map(r -> AcknowledgedResponse.TRUE)); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 9ac1f4935c6cc..6904f3b2760fa 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.ToXContent; @@ -28,6 +29,7 @@ import org.junit.Before; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -38,12 +40,16 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { private static final String NON_EXISTING_CONNECTOR_ID = "non-existing-connector-id"; + private static final String NON_EXISTING_SYNC_JOB_ID = "non-existing-sync-job-id"; + private static final String LAST_SEEN_FIELD_NAME = ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(); private static final int TIMEOUT_SECONDS = 10; + private static final int ONE_SECOND_IN_MILLIS = 1000; private ConnectorSyncJobIndexService connectorSyncJobIndexService; private Connector connector; @@ -169,7 +175,71 @@ public void testDeleteConnectorSyncJob() throws Exception { } public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { - expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnectorSyncJob("non-existing-sync-job-id")); + 
expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); + } + + public void testCheckInConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenBeforeUpdate = Instant.parse((String) syncJobSourceBeforeUpdate.get(LAST_SEEN_FIELD_NAME)); + + safeSleep(ONE_SECOND_IN_MILLIS); + + UpdateResponse updateResponse = awaitCheckInConnectorSyncJob(syncJobId); + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenAfterUpdate = Instant.parse((String) syncJobSourceAfterUpdate.get(LAST_SEEN_FIELD_NAME)); + long secondsBetweenLastSeenBeforeAndAfterUpdate = ChronoUnit.SECONDS.between(lastSeenBeforeUpdate, lastSeenAfterUpdate); + + assertThat("Wrong sync job was updated", syncJobId, equalTo(updateResponse.getId())); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertTrue( + "[" + LAST_SEEN_FIELD_NAME + "] after the check in is not after [" + LAST_SEEN_FIELD_NAME + "] before the check in", + lastSeenAfterUpdate.isAfter(lastSeenBeforeUpdate) + ); + assertThat( + "there must be at least one second between [" + + LAST_SEEN_FIELD_NAME + + "] after the check in and [" + + LAST_SEEN_FIELD_NAME + + "] before the check in", + secondsBetweenLastSeenBeforeAndAfterUpdate, + greaterThanOrEqualTo(1L) + ); + assertFieldsExceptLastSeenDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { + expectThrows(ResourceNotFoundException.class, () -> awaitCheckInConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); + } + + private static void 
assertFieldsExceptLastSeenDidNotUpdate( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate + ) { + for (Map.Entry field : syncJobSourceBeforeUpdate.entrySet()) { + String fieldName = field.getKey(); + boolean isNotLastSeen = fieldName.equals(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) == false; + + if (isNotLastSeen) { + Object fieldValueBeforeUpdate = field.getValue(); + Object fieldValueAfterUpdate = syncJobSourceAfterUpdate.get(fieldName); + + assertThat( + "Every field except [" + + LAST_SEEN_FIELD_NAME + + "] should stay the same when checking in a sync job. [" + + fieldName + + "] did change.", + fieldValueBeforeUpdate, + equalTo(fieldValueAfterUpdate) + ); + } + } } private Map getConnectorSyncJobSourceById(String syncJobId) throws ExecutionException, InterruptedException, @@ -180,6 +250,31 @@ private Map getConnectorSyncJobSourceById(String syncJobId) thro return getResponseActionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getSource(); } + private UpdateResponse awaitCheckInConnectorSyncJob(String connectorSyncJobId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorSyncJobIndexService.checkInConnectorSyncJob(connectorSyncJobId, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for check in request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from check in request", resp.get()); + return resp.get(); + } + private void awaitPutConnectorSyncJobExpectingException( PostConnectorSyncJobAction.Request syncJobRequest, ActionListener listener @@ -262,5 +357,4 @@ public void 
onFailure(Exception e) { return response; } - } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 099173735edd2..e9ff95967e626 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; @@ -90,4 +91,8 @@ public static PostConnectorSyncJobAction.Request getRandomPostConnectorSyncJobAc public static PostConnectorSyncJobAction.Response getRandomPostConnectorSyncJobActionResponse() { return new PostConnectorSyncJobAction.Response(randomAlphaOfLength(10)); } + + public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyncJobActionRequest() { + return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..63f874b32f37c --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class CheckInConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + CheckInConnectorSyncJobAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return CheckInConnectorSyncJobAction.Request::new; + } + + @Override + protected CheckInConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomCheckInConnectorSyncJobActionRequest(); + } + + @Override + protected CheckInConnectorSyncJobAction.Request mutateInstance(CheckInConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected CheckInConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return CheckInConnectorSyncJobAction.Request.parse(parser); + } + + @Override + protected CheckInConnectorSyncJobAction.Request mutateInstanceForVersion( + CheckInConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return new CheckInConnectorSyncJobAction.Request(instance.getConnectorSyncJobId()); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..fe5046e42f828 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobActionTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class CheckInConnectorSyncJobActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdIsPresent_ExpectNoValidationError() { + CheckInConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomCheckInConnectorSyncJobActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + CheckInConnectorSyncJobAction.Request requestWithMissingConnectorSyncJobId = new CheckInConnectorSyncJobAction.Request(""); + ActionRequestValidationException exception = requestWithMissingConnectorSyncJobId.validate(); + 
+ assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java index ee79db86152c6..00dff3e83211b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/DeleteConnectorSyncJobActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; import static org.hamcrest.Matchers.containsString; @@ -28,7 +29,7 @@ public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); assertThat(exception, notNullValue()); - assertThat(exception.getMessage(), containsString(DeleteConnectorSyncJobAction.Request.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java new file mode 100644 index 
0000000000000..d88a246b6d5e2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobActionTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportCheckInConnectorSyncJobActionTests extends ESSingleNodeTestCase { + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportCheckInConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new 
TransportCheckInConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testCheckInConnectorSyncJob_ExpectNoWarnings() throws InterruptedException { + CheckInConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomCheckInConnectorSyncJobActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(CheckInConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for checkin request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 7863a50b764e7..67c0b2d2ca41d 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -129,6 +129,7 @@ public class Constants { "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", + "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/deprecation/info", From 
640687a8ef936fa97870c0773e87e47584ccc8ee Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 1 Dec 2023 14:54:11 +0100 Subject: [PATCH 133/263] Decref CacheFileRegion after read is done not before (#102848) The CacheFileRegion instance is decref before the read operation is executed, meaning that the SharedBytes.IO instance can return to the pool of free regions, being polled and written by another thread for another cache file region, before the first read is effectively completed (and will return incorrect bytes). --- docs/changelog/102848.yaml | 5 +++++ .../blobcache/shared/SharedBlobCacheService.java | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/102848.yaml diff --git a/docs/changelog/102848.yaml b/docs/changelog/102848.yaml new file mode 100644 index 0000000000000..971d91a878579 --- /dev/null +++ b/docs/changelog/102848.yaml @@ -0,0 +1,5 @@ +pr: 102848 +summary: Decref `SharedBytes.IO` after read is done not before +area: Snapshot/Restore +type: bug +issues: [] diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 847779c9066c4..695e96850e8e1 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -681,7 +681,7 @@ void populateAndRead( final List gaps = tracker.waitForRange( rangeToWrite, rangeToRead, - ActionListener.runBefore(listener, resource::close).delegateFailureAndWrap((l, success) -> { + ActionListener.runAfter(listener, resource::close).delegateFailureAndWrap((l, success) -> { var ioRef = io; assert regionOwners.get(ioRef) == this; final int start = Math.toIntExact(rangeToRead.start()); From c622dad8ddcf6fa6893d9be919b8ecba4016e0d1 Mon Sep 17 00:00:00 2001 From: Jorge 
Sanz Date: Fri, 1 Dec 2023 15:20:42 +0100 Subject: [PATCH 134/263] [Docs] Move coordinate note for geojson/wkt up to the beginning of the geo_shape page (#102857) * Move coordinate note for geojson/wkt up to the beginning of the page * Add links to GeoJSON and WKT specs --- docs/reference/mapping/types/geo-shape.asciidoc | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 37ef340733932..628f764c04fe9 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -30,6 +30,15 @@ The `geo_shape` mapping maps GeoJSON or WKT geometry objects to the `geo_shape` type. To enable it, users must explicitly map fields to the `geo_shape` type. +[NOTE] +============================================= +In https://datatracker.ietf.org/doc/html/rfc7946[GeoJSON] +and https://www.ogc.org/standard/sfa/[WKT], and therefore Elasticsearch, +the correct *coordinate order is longitude, latitude (X, Y)* within coordinate +arrays. This differs from many Geospatial APIs (e.g., Google Maps) that generally +use the colloquial latitude, longitude (Y, X). +============================================= + [cols="<,<,<",options="header",] |======================================================================= |Option |Description| Default @@ -142,11 +151,6 @@ specifying only the top left and bottom right points. ============================================= For all types, both the inner `type` and `coordinates` fields are required. - -In GeoJSON and WKT, and therefore Elasticsearch, the correct *coordinate -order is longitude, latitude (X, Y)* within coordinate arrays. This -differs from many Geospatial APIs (e.g., Google Maps) that generally -use the colloquial latitude, longitude (Y, X). 
============================================= [[geo-point-type]] From f11ed3c596244da97b6939120a7e389f149cda94 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Fri, 1 Dec 2023 16:23:31 +0200 Subject: [PATCH 135/263] [CI] fix for leak in SearchCancellationIT (#102861) --- .../search/SearchCancellationIT.java | 30 +++++++++++-------- .../AbstractSearchCancellationTestCase.java | 5 +++- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 82e9ad4dc6cde..19dfe598b5318 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.search; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.MultiSearchResponse; @@ -51,7 +50,6 @@ import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102257") public class SearchCancellationIT extends AbstractSearchCancellationTestCase { @Override @@ -206,7 +204,7 @@ public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exceptio public void testCancelMultiSearch() throws Exception { List plugins = initBlockFactory(); indexTestData(); - ActionFuture msearchResponse = client().prepareMultiSearch() + ActionFuture multiSearchResponse = client().prepareMultiSearch() .add( prepareSearch("test").addScriptField( "test_field", @@ -214,18 +212,24 @@ public void testCancelMultiSearch() throws Exception { ) ) .execute(); - awaitForBlock(plugins); - 
cancelSearch(TransportMultiSearchAction.TYPE.name()); - disableBlocks(plugins); - for (MultiSearchResponse.Item item : msearchResponse.actionGet()) { - if (item.getFailure() != null) { - assertThat(ExceptionsHelper.unwrap(item.getFailure(), TaskCancelledException.class), notNullValue()); - } else { - assertFailures(item.getResponse()); - for (ShardSearchFailure shardFailure : item.getResponse().getShardFailures()) { - assertThat(ExceptionsHelper.unwrap(shardFailure.getCause(), TaskCancelledException.class), notNullValue()); + MultiSearchResponse response = null; + try { + awaitForBlock(plugins); + cancelSearch(TransportMultiSearchAction.TYPE.name()); + disableBlocks(plugins); + response = multiSearchResponse.actionGet(); + for (MultiSearchResponse.Item item : response) { + if (item.getFailure() != null) { + assertThat(ExceptionsHelper.unwrap(item.getFailure(), TaskCancelledException.class), notNullValue()); + } else { + assertFailures(item.getResponse()); + for (ShardSearchFailure shardFailure : item.getResponse().getShardFailures()) { + assertThat(ExceptionsHelper.unwrap(shardFailure.getCause(), TaskCancelledException.class), notNullValue()); + } } } + } finally { + if (response != null) response.decRef(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java index ba0921972778f..9d151e690b071 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java @@ -123,8 +123,9 @@ protected void cancelSearch(String action) { } protected SearchResponse ensureSearchWasCancelled(ActionFuture searchResponse) { + SearchResponse response = null; try { - SearchResponse response = searchResponse.actionGet(); + response = searchResponse.actionGet(); logger.info("Search response {}", response); 
assertNotEquals("At least one shard should have failed", 0, response.getFailedShards()); for (ShardSearchFailure failure : response.getShardFailures()) { @@ -137,6 +138,8 @@ protected SearchResponse ensureSearchWasCancelled(ActionFuture s assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); logger.info("All shards failed with", ex); return null; + } finally { + if (response != null) response.decRef(); } } From c11c2afc39711df9928d6968e2c7ecc1cbb3a414 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 1 Dec 2023 15:43:24 +0100 Subject: [PATCH 136/263] Update apm's otel version (#102851) The otel api also has to be upgraded along the new apm agent version. the versions are picked as per https://github.com/elastic/apm-agent-java/commit/546da88d55b9e0f66c89072ac696687e54fe7779#diff-b977da1986b483bc5635c37235e99d23e8825301044d4316d37d9315eff89fddR22 follow up after an apm agent version upgrade https://github.com/elastic/elasticsearch/pull/102691 --- gradle/verification-metadata.xml | 20 ++++++++++---------- modules/apm/build.gradle | 5 +++-- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index ed7ae1b5b5638..8c5022cea289d 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1,5 +1,5 @@ - + false false @@ -1401,19 +1401,19 @@ - - - + + + - - - + + + - - - + + + diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index 13f1ac4a4cd3e..4c822e44da6f6 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -12,12 +12,13 @@ esplugin { classname 'org.elasticsearch.telemetry.apm.APM' } -def otelVersion = '1.17.0' +def otelVersion = '1.31.0' +def otelSemconvVersion = '1.21.0-alpha' dependencies { implementation "io.opentelemetry:opentelemetry-api:${otelVersion}" implementation "io.opentelemetry:opentelemetry-context:${otelVersion}" - implementation 
"io.opentelemetry:opentelemetry-semconv:${otelVersion}-alpha" + implementation "io.opentelemetry:opentelemetry-semconv:${otelSemconvVersion}" runtimeOnly "co.elastic.apm:elastic-apm-agent:1.44.0" } From c49279a3926cf2807b97c4ef27bdc070fa8f8d6d Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Fri, 1 Dec 2023 15:45:24 +0100 Subject: [PATCH 137/263] [Connectors API] Implement update pipeline action (#102812) --- .../api/connector.update_pipeline.json | 39 ++++ .../330_connector_update_pipeline.yml | 64 ++++++ .../331_connector_update_scheduling.yml | 4 +- .../332_connector_update_filtering.yml | 6 +- .../xpack/application/EnterpriseSearch.java | 5 + .../connector/ConnectorIndexService.java | 31 +++ .../RestUpdateConnectorPipelineAction.java | 45 +++++ ...ransportUpdateConnectorPipelineAction.java | 55 +++++ .../UpdateConnectorFilteringAction.java | 14 +- .../action/UpdateConnectorPipelineAction.java | 190 ++++++++++++++++++ .../UpdateConnectorSchedulingAction.java | 4 + .../connector/ConnectorIndexServiceTests.java | 50 ++++- ...elineActionRequestBWCSerializingTests.java | 51 +++++ ...lineActionResponseBWCSerializingTests.java | 41 ++++ .../xpack/security/operator/Constants.java | 1 + 15 files changed, 592 insertions(+), 8 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/330_connector_update_pipeline.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json new file mode 100644 index 0000000000000..2bd1acf7d28a6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json @@ -0,0 +1,39 @@ +{ + "connector.update_pipeline": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the pipeline field in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_pipeline", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "An object with connector ingest pipeline configuration.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/330_connector_update_pipeline.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/330_connector_update_pipeline.yml new file mode 100644 index 0000000000000..8d0bfe0232932 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/330_connector_update_pipeline.yml @@ -0,0 +1,64 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Pipeline - Connector doesn't exist": + - do: + catch: "missing" + connector.update_pipeline: + connector_id: test-non-existent-connector + body: + pipeline: + extract_binary_content: true + name: test-pipeline + reduce_whitespace: true + run_ml_inference: false + +--- +"Update Connector Pipeline": + - do: + connector.update_pipeline: + connector_id: test-connector + body: + pipeline: + extract_binary_content: true + name: test-pipeline + reduce_whitespace: true + run_ml_inference: false + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { pipeline.extract_binary_content: true } + - match: { pipeline.name: test-pipeline } + - match: { pipeline.reduce_whitespace: true } + - match: { pipeline.run_ml_inference: false } + +--- +"Update Connector Pipeline - Required fields are missing": + - do: + catch: "bad_request" + connector.update_pipeline: + connector_id: test-connector + body: + pipeline: + extract_binary_content: true + name: test-pipeline + run_ml_inference: false + + diff --git 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml index 191ccbb6ba4bc..e8e3fa0e87068 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/331_connector_update_scheduling.yml @@ -63,7 +63,7 @@ setup: interval: 3 0 0 * * ? --- -"Update Connector Scheduling - 400 status code returned when required fields are missing": +"Update Connector Scheduling - Required fields are missing": - do: catch: "bad_request" connector.update_scheduling: @@ -75,7 +75,7 @@ setup: interval: 3 0 0 * * ? --- -"Update Connector Scheduling - 400 status code returned with wrong CRON expression": +"Update Connector Scheduling - Wrong CRON expression": - do: catch: "bad_request" connector.update_scheduling: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml index aa735081f55e3..c5634365db3ec 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/332_connector_update_filtering.yml @@ -107,7 +107,7 @@ setup: - match: { filtering.1.draft.rules.0.id: "RULE-DRAFT-1" } --- -"Update Connector Filtering - 404 status code returned when connector doesn't exist": +"Update Connector Filtering - Connector doesn't exist": - do: catch: "missing" connector.update_filtering: @@ -186,7 +186,7 @@ setup: state: valid --- -"Update Connector Filtering - 400 
status code returned when required fields are missing": +"Update Connector Filtering - Required fields are missing": - do: catch: "bad_request" connector.update_filtering: @@ -199,7 +199,7 @@ setup: status: 400 --- -"Update Connector Filtering - 400 status code returned with wrong datetime expression": +"Update Connector Filtering - Wrong datetime expression": - do: catch: "bad_request" connector.update_filtering: diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 0978e4cc08f21..31e9b165e7325 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -51,14 +51,17 @@ import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; @@ -190,6 +193,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), + new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), // SyncJob API @@ -252,6 +256,7 @@ public List getRestHandlers( new RestListConnectorAction(), new RestPutConnectorAction(), new RestUpdateConnectorFilteringAction(), + new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), // SyncJob API diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 6ca575580e9fd..749e8c2e9dd87 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xcontent.ToXContent; import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import java.util.Arrays; @@ -200,6 +201,36 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ } } + /** + * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. + * + * @param request Request for updating connector ingest pipeline property. + * @param listener Listener to respond to a successful response or an error. + */ + public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the {@link ConnectorScheduling} property of a {@link Connector}. 
* diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java new file mode 100644 index 0000000000000..ba83bd42dac11 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorPipelineAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_pipeline_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_pipeline")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorPipelineAction.Request request = UpdateConnectorPipelineAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorPipelineAction.INSTANCE, + request, + new RestToXContentListener<>(channel, 
UpdateConnectorPipelineAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java new file mode 100644 index 0000000000000..c54d3db1215bc --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorPipelineAction extends HandledTransportAction< + UpdateConnectorPipelineAction.Request, + UpdateConnectorPipelineAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorPipelineAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorPipelineAction.NAME, + transportService, + actionFilters, + 
UpdateConnectorPipelineAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorPipelineAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorPipeline( + request, + listener.map(r -> new UpdateConnectorPipelineAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java index 43d71fa40c436..68c644cb9d9db 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,6 +32,7 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; public class UpdateConnectorFilteringAction extends ActionType { @@ -68,7 +70,17 @@ public List getFiltering() { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or 
empty.", validationException); + } + + if (filtering == null) { + validationException = addValidationError("[filtering] cannot be null.", validationException); + } + + return validationException; } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java new file mode 100644 index 0000000000000..68babb2d4b517 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java @@ -0,0 +1,190 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorPipelineAction extends ActionType { + + public static final UpdateConnectorPipelineAction INSTANCE = new UpdateConnectorPipelineAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_pipeline"; + + public UpdateConnectorPipelineAction() { + super(NAME, UpdateConnectorPipelineAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final ConnectorIngestPipeline pipeline; + + public Request(String connectorId, ConnectorIngestPipeline pipeline) { + this.connectorId = connectorId; + this.pipeline = pipeline; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.pipeline = in.readOptionalWriteable(ConnectorIngestPipeline::new); + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorIngestPipeline getPipeline() { + return pipeline; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + if (Objects.isNull(pipeline)) { + validationException = addValidationError("[pipeline] cannot be null.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_pipeline_request", + 
false, + ((args, connectorId) -> new UpdateConnectorPipelineAction.Request(connectorId, (ConnectorIngestPipeline) args[0])) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> ConnectorIngestPipeline.fromXContent(p), Connector.PIPELINE_FIELD); + } + + public static UpdateConnectorPipelineAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorPipelineAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorPipelineAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.PIPELINE_FIELD.getPreferredName(), pipeline); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalWriteable(pipeline); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(pipeline, request.pipeline); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, pipeline); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + 
public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java index dfca87b8324cb..9867830c5d211 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java @@ -75,6 +75,10 @@ public ActionRequestValidationException validate() { validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); } + if (Objects.isNull(scheduling)) { + validationException = addValidationError("[scheduling] cannot be null.", validationException); + } + return validationException; } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 854e80ad1bf2d..5f32f27b1ec64 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -64,6 +65,28 @@ public void testDeleteConnector() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); } + public void testUpdateConnectorPipeline() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorIngestPipeline updatedPipeline = new ConnectorIngestPipeline.Builder().setName("test-pipeline") + .setExtractBinaryContent(false) + .setReduceWhitespace(true) + .setRunMlInference(false) + .build(); + + UpdateConnectorPipelineAction.Request updatePipelineRequest = new UpdateConnectorPipelineAction.Request( + connector.getConnectorId(), + updatedPipeline + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorPipeline(updatePipelineRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(updatedPipeline, equalTo(indexedConnector.getPipeline())); + } + public void testUpdateConnectorFiltering() throws 
Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); @@ -81,9 +104,7 @@ public void testUpdateConnectorFiltering() throws Exception { DocWriteResponse updateResponse = awaitUpdateConnectorFiltering(updateFilteringRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); - assertThat(filteringList, equalTo(indexedConnector.getFiltering())); } @@ -231,6 +252,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorPipeline(updatePipeline, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update pipeline request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update pipeline request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorScheduling(UpdateConnectorSchedulingAction.Request updatedScheduling) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionRequestBWCSerializingTests.java new file mode 100644 index 
0000000000000..14df1b704f995 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionRequestBWCSerializingTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorPipelineActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorPipelineAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorPipelineAction.Request::new; + } + + @Override + protected UpdateConnectorPipelineAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorPipelineAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorIngestPipeline()); + } + + @Override + protected UpdateConnectorPipelineAction.Request mutateInstance(UpdateConnectorPipelineAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorPipelineAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorPipelineAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorPipelineAction.Request mutateInstanceForVersion( + UpdateConnectorPipelineAction.Request 
instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..065dafcaf00a4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorPipelineActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorPipelineAction.Response> { + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorPipelineAction.Response::new; + } + + @Override + protected UpdateConnectorPipelineAction.Response createTestInstance() { + return new UpdateConnectorPipelineAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorPipelineAction.Response mutateInstance(UpdateConnectorPipelineAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorPipelineAction.Response 
mutateInstanceForVersion( + UpdateConnectorPipelineAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 67c0b2d2ca41d..2325a2db8c077 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -131,6 +131,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/update_filtering", + "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From cf9337f57e8c84e6697859f449c536c755a61d8c Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 1 Dec 2023 06:50:52 -0800 Subject: [PATCH 138/263] AwaitsFix for #102868 --- .../test/java/org/elasticsearch/ExceptionSerializationTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index abf79243b6a61..2263bfe78f218 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -353,6 +353,7 @@ public void testActionTransportException() throws IOException { assertEquals("[name?][" + transportAddress + "][ACTION BABY!] 
message?", ex.getMessage()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102868") public void testSearchContextMissingException() throws IOException { ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); TransportVersion version = TransportVersionUtils.randomVersion(random()); From 3154595497ba079a77628bcd759e7d45dcd6bb76 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 1 Dec 2023 15:51:51 +0100 Subject: [PATCH 139/263] ES|QL: Add random test query generator (#102073) --- .../esql/qa/single_node/GenerativeIT.java | 14 + .../rest/generative/EsqlQueryGenerator.java | 401 ++++++++++++++++++ .../rest/generative/GenerativeRestTest.java | 103 +++++ 3 files changed, 518 insertions(+) create mode 100644 x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java create mode 100644 x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java create mode 100644 x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java new file mode 100644 index 0000000000000..e499b13bf1db8 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/GenerativeIT.java @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; +import org.elasticsearch.xpack.esql.qa.rest.generative.GenerativeRestTest; + +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102084") +public class GenerativeIT extends GenerativeRestTest {} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java new file mode 100644 index 0000000000000..25530e3d744ad --- /dev/null +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/EsqlQueryGenerator.java @@ -0,0 +1,401 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.rest.generative; + +import org.elasticsearch.xpack.esql.CsvTestsDataLoader; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; +import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomLongBetween; + +public class EsqlQueryGenerator { + + public record Column(String name, String type) {} + + public record QueryExecuted(String query, int depth, List outputSchema, Exception exception) {} + + public static String sourceCommand(List availabeIndices) { + return switch (randomIntBetween(0, 2)) { + case 0 -> from(availabeIndices); + case 1 -> showFunctions(); + default -> row(); + }; + + } + + /** + * @param previousOutput a list of fieldName+type + * @param policies + * @return a new command that can process it as input + */ + public static String pipeCommand(List previousOutput, List policies) { + return switch (randomIntBetween(0, 11)) { + case 0 -> dissect(previousOutput); + case 1 -> drop(previousOutput); + case 2 -> enrich(previousOutput, policies); + case 3 -> eval(previousOutput); + case 4 -> grok(previousOutput); + case 5 -> keep(previousOutput); + case 6 -> limit(); + case 7 -> mvExpand(previousOutput); + case 8 -> rename(previousOutput); + case 9 -> sort(previousOutput); + case 10 -> stats(previousOutput); + default -> where(previousOutput); + }; + } + + private static String where(List previousOutput) { + // TODO more complex conditions + StringBuilder result = new StringBuilder(" | where "); + int nConditions = randomIntBetween(1, 5); + for (int i = 0; i < nConditions; i++) { + String exp = booleanExpression(previousOutput); + if (exp == null) { + // cannot generate 
expressions, just skip + return ""; + } + if (i > 0) { + result.append(randomBoolean() ? " AND " : " OR "); + } + if (randomBoolean()) { + result.append(" NOT "); + } + result.append(exp); + } + + return result.toString(); + } + + private static String booleanExpression(List previousOutput) { + // TODO LIKE, RLIKE, functions etc. + return switch (randomIntBetween(0, 3)) { + case 0 -> { + String field = randomNumericField(previousOutput); + if (field == null) { + yield null; + } + yield field + " " + mathCompareOperator() + " 50"; + } + case 1 -> "true"; + default -> "false"; + }; + } + + private static String mathCompareOperator() { + return switch (randomIntBetween(0, 5)) { + case 0 -> "=="; + case 1 -> ">"; + case 2 -> ">="; + case 3 -> "<"; + case 4 -> "<="; + default -> "!="; + }; + } + + private static String enrich(List previousOutput, List policies) { + String field = randomKeywordField(previousOutput); + if (field == null || policies.isEmpty()) { + return ""; + } + // TODO add WITH + return " | enrich " + randomFrom(policies).policyName() + " on " + field; + } + + private static String grok(List previousOutput) { + String field = randomStringField(previousOutput); + if (field == null) { + return "";// no strings to grok, just skip + } + StringBuilder result = new StringBuilder(" | grok "); + result.append(field); + result.append(" \""); + for (int i = 0; i < randomIntBetween(1, 3); i++) { + if (i > 0) { + result.append(" "); + } + result.append("%{WORD:"); + if (randomBoolean()) { + result.append(randomAlphaOfLength(5)); + } else { + result.append(randomName(previousOutput)); + } + result.append("}"); + } + result.append("\""); + return result.toString(); + } + + private static String dissect(List previousOutput) { + String field = randomStringField(previousOutput); + if (field == null) { + return "";// no strings to dissect, just skip + } + StringBuilder result = new StringBuilder(" | dissect "); + result.append(field); + result.append(" \""); + for (int i 
= 0; i < randomIntBetween(1, 3); i++) { + if (i > 0) { + result.append(" "); + } + result.append("%{"); + if (randomBoolean()) { + result.append(randomAlphaOfLength(5)); + } else { + result.append(randomName(previousOutput)); + } + result.append("}"); + } + result.append("\""); + return result.toString(); + } + + private static String keep(List previousOutput) { + int n = randomIntBetween(1, previousOutput.size()); + Set proj = new HashSet<>(); + for (int i = 0; i < n; i++) { + if (randomIntBetween(0, 100) < 5) { + proj.add("*"); + } else { + String name = randomName(previousOutput); + if (name.length() > 1 && randomIntBetween(0, 100) < 10) { + if (randomBoolean()) { + name = name.substring(0, randomIntBetween(1, name.length() - 1)) + "*"; + } else { + name = "*" + name.substring(randomIntBetween(1, name.length() - 1)); + } + } + proj.add(name); + } + } + return " | keep " + proj.stream().collect(Collectors.joining(", ")); + } + + private static String randomName(List previousOutput) { + return previousOutput.get(randomIntBetween(0, previousOutput.size() - 1)).name(); + } + + private static String rename(List previousOutput) { + int n = randomIntBetween(1, Math.min(3, previousOutput.size())); + List proj = new ArrayList<>(); + List names = new ArrayList<>(previousOutput.stream().map(Column::name).collect(Collectors.toList())); + for (int i = 0; i < n; i++) { + String name = names.remove(randomIntBetween(0, names.size() - 1)); + String newName; + if (names.isEmpty() || randomBoolean()) { + newName = randomAlphaOfLength(5); + } else { + newName = names.get(randomIntBetween(0, names.size() - 1)); + } + names.add(newName); + proj.add(name + " AS " + newName); + } + return " | rename " + proj.stream().collect(Collectors.joining(", ")); + } + + private static String drop(List previousOutput) { + if (previousOutput.size() < 2) { + return ""; // don't drop all of them, just do nothing + } + int n = randomIntBetween(1, previousOutput.size() - 1); + Set proj = new 
HashSet<>(); + for (int i = 0; i < n; i++) { + String name = randomName(previousOutput); + if (name.length() > 1 && randomIntBetween(0, 100) < 10) { + if (randomBoolean()) { + name = name.substring(0, randomIntBetween(1, name.length() - 1)) + "*"; + } else { + name = "*" + name.substring(randomIntBetween(1, name.length() - 1)); + } + } + proj.add(name); + } + return " | drop " + proj.stream().collect(Collectors.joining(", ")); + } + + private static String sort(List previousOutput) { + int n = randomIntBetween(1, previousOutput.size()); + Set proj = new HashSet<>(); + for (int i = 0; i < n; i++) { + proj.add(randomName(previousOutput)); + } + return " | sort " + + proj.stream() + .map(x -> x + randomFrom("", " ASC", " DESC") + randomFrom("", " NULLS FIRST", " NULLS LAST")) + .collect(Collectors.joining(", ")); + } + + private static String mvExpand(List previousOutput) { + return " | mv_expand " + randomName(previousOutput); + } + + private static String eval(List previousOutput) { + StringBuilder cmd = new StringBuilder(" | eval "); + int nFields = randomIntBetween(1, 10); + // TODO pass newly created fields to next expressions + for (int i = 0; i < nFields; i++) { + String name; + if (randomBoolean()) { + name = randomAlphaOfLength(randomIntBetween(3, 10)); + } else { + name = randomName(previousOutput); + } + String expression = expression(previousOutput); + if (i > 0) { + cmd.append(","); + } + cmd.append(" "); + cmd.append(name); + cmd.append(" = "); + cmd.append(expression); + } + return cmd.toString(); + } + + private static String stats(List previousOutput) { + List nonNull = previousOutput.stream().filter(x -> x.type().equals("null") == false).collect(Collectors.toList()); + if (nonNull.isEmpty()) { + return ""; // cannot do any stats, just skip + } + StringBuilder cmd = new StringBuilder(" | stats "); + int nStats = randomIntBetween(1, 5); + for (int i = 0; i < nStats; i++) { + String name; + if (randomBoolean()) { + name = 
randomAlphaOfLength(randomIntBetween(3, 10)); + } else { + name = randomName(previousOutput); + } + String expression = agg(nonNull); + if (i > 0) { + cmd.append(","); + } + cmd.append(" "); + cmd.append(name); + cmd.append(" = "); + cmd.append(expression); + } + if (randomBoolean()) { + cmd.append(" by "); + + cmd.append(randomName(nonNull)); + } + return cmd.toString(); + } + + private static String agg(List previousOutput) { + String name = randomNumericOrDateField(previousOutput); + if (name != null && randomBoolean()) { + // numerics only + return switch (randomIntBetween(0, 1)) { + case 0 -> "max(" + name + ")"; + default -> "min(" + name + ")"; + // TODO more numerics + }; + } + // all types + name = randomName(previousOutput); + return switch (randomIntBetween(0, 2)) { + case 0 -> "count(*)"; + case 1 -> "count(" + name + ")"; + default -> "count_distinct(" + name + ")"; + }; + } + + private static String randomNumericOrDateField(List previousOutput) { + return randomName(previousOutput, Set.of("long", "integer", "double", "date")); + } + + private static String randomNumericField(List previousOutput) { + return randomName(previousOutput, Set.of("long", "integer", "double")); + } + + private static String randomStringField(List previousOutput) { + return randomName(previousOutput, Set.of("text", "keyword")); + } + + private static String randomKeywordField(List previousOutput) { + return randomName(previousOutput, Set.of("keyword")); + } + + private static String randomName(List cols, Set allowedTypes) { + List items = cols.stream().filter(x -> allowedTypes.contains(x.type())).map(Column::name).collect(Collectors.toList()); + if (items.size() == 0) { + return null; + } + return items.get(randomIntBetween(0, items.size() - 1)); + } + + private static String expression(List previousOutput) { + // TODO improve!!! 
+ return constantExpression(); + } + + public static String limit() { + return " | limit " + randomIntBetween(0, 15000); + } + + private static String from(List availabeIndices) { + StringBuilder result = new StringBuilder("from "); + int items = randomIntBetween(1, 3); + for (int i = 0; i < items; i++) { + String pattern = indexPattern(availabeIndices.get(randomIntBetween(0, availabeIndices.size() - 1))); + if (i > 0) { + result.append(","); + } + result.append(pattern); + } + return result.toString(); + } + + private static String showFunctions() { + return "show functions"; + } + + private static String indexPattern(String indexName) { + return randomBoolean() ? indexName : indexName.substring(0, randomIntBetween(0, indexName.length())) + "*"; + } + + private static String row() { + StringBuilder cmd = new StringBuilder("row "); + int nFields = randomIntBetween(1, 10); + for (int i = 0; i < nFields; i++) { + String name = randomAlphaOfLength(randomIntBetween(3, 10)); + String expression = constantExpression(); + if (i > 0) { + cmd.append(","); + } + cmd.append(" "); + cmd.append(name); + cmd.append(" = "); + cmd.append(expression); + } + return cmd.toString(); + } + + private static String constantExpression() { + // TODO not only simple values, but also foldable expressions + return switch (randomIntBetween(0, 4)) { + case 0 -> "" + randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE); + case 1 -> "" + randomLongBetween(Long.MIN_VALUE, Long.MAX_VALUE); + case 2 -> "\"" + randomAlphaOfLength(randomIntBetween(0, 20)) + "\""; + case 3 -> "" + randomBoolean(); + default -> "null"; + }; + + } + +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java new file mode 100644 index 0000000000000..9ba54ea1941fd --- /dev/null +++ 
b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.qa.rest.generative; + +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.esql.CsvTestsDataLoader; +import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_POLICIES; +import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs; + +public abstract class GenerativeRestTest extends ESRestTestCase { + + public static final int ITERATIONS = 50; + public static final int MAX_DEPTH = 10; + + public static final Set ALLOWED_ERRORS = Set.of( + "is ambiguous (to disambiguate use quotes or qualifiers)", + "due to ambiguities being mapped as" + ); + + @Before + public void setup() throws IOException { + if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { + loadDataSetIntoEs(client()); + } + } + + public void test() { + List indices = availableIndices(); + List policies = availableEnrichPolicies(); + for (int i = 0; i < ITERATIONS; i++) { + String command = EsqlQueryGenerator.sourceCommand(indices); + EsqlQueryGenerator.QueryExecuted result = execute(command, 0); + if (result.exception() != null) { + checkException(result); + continue; + } + for (int j = 0; j < MAX_DEPTH; j++) { + if (result.outputSchema().isEmpty()) { + break; + } + 
command = EsqlQueryGenerator.pipeCommand(result.outputSchema(), policies); + result = execute(result.query() + command, result.depth() + 1); + if (result.exception() != null) { + checkException(result); + break; + } + } + } + } + + private void checkException(EsqlQueryGenerator.QueryExecuted query) { + for (String allowedError : ALLOWED_ERRORS) { + if (query.exception().getMessage().contains(allowedError)) { + return; + } + } + fail("query: " + query.query() + "\nexception: " + query.exception().getMessage()); + } + + private EsqlQueryGenerator.QueryExecuted execute(String command, int depth) { + try { + Map a = RestEsqlTestCase.runEsql(new RestEsqlTestCase.RequestObjectBuilder().query(command).build()); + List outputSchema = outputSchema(a); + return new EsqlQueryGenerator.QueryExecuted(command, depth, outputSchema, null); + } catch (Exception e) { + return new EsqlQueryGenerator.QueryExecuted(command, depth, null, e); + } + + } + + @SuppressWarnings("unchecked") + private List outputSchema(Map a) { + List> cols = (List>) a.get("columns"); + if (cols == null) { + return null; + } + return cols.stream().map(x -> new EsqlQueryGenerator.Column(x.get("name"), x.get("type"))).collect(Collectors.toList()); + } + + private List availableIndices() { + return new ArrayList<>(CSV_DATASET_MAP.keySet()); + } + + List availableEnrichPolicies() { + return ENRICH_POLICIES; + } +} From b01fe5efe2522fe0f407ad843bf5bbf336c89a72 Mon Sep 17 00:00:00 2001 From: Benjamin Trent <4357155+benwtrent@users.noreply.github.com> Date: Fri, 1 Dec 2023 09:53:18 -0500 Subject: [PATCH 140/263] Fix search.vectors/41_knn_search_byte_quantized/Knn search with mip flakiness --- .../test/search.vectors/41_knn_search_byte_quantized.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml index 12fb4d1bbcb1d..948a6e04a128b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/41_knn_search_byte_quantized.yml @@ -267,6 +267,13 @@ setup: name: rabbit.jpg vector: [0.5, 111.3, -13.0, 14.8, -156.0] + # We force merge into a single segment to make sure scores are more uniform + # Each segment can have a different quantization error, which can affect scores and mip is especially sensitive to this + - do: + indices.forcemerge: + index: mip + max_num_segments: 1 + - do: indices.refresh: {} From bc68e05a81cb36b089e9a4b9ad40d34d9f668e09 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 1 Dec 2023 06:56:03 -0800 Subject: [PATCH 141/263] AwaitsFix for #102869 --- .../main/java/org/elasticsearch/index/mapper/MapperTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 44e28132beec0..33eb25cd472c4 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1239,6 +1239,7 @@ public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { assertNoDocValueLoader(b -> b.startArray("field").endArray()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102869") public final void testBlockLoaderFromColumnReader() throws IOException { testBlockLoader(true); } From a386b445b3fb5b91922e48643b630cfa3ed297bb Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 1 Dec 2023 07:15:30 -0800 Subject: [PATCH 142/263] AwaitsFix for 102871 --- 
.../resources/rest-api-spec/test/esql/100_bug_fix.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index 1876d1a6d3881..e768a6b348959 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -1,8 +1,10 @@ --- "Coalesce and to_ip functions": - skip: - version: " - 8.11.99" - reason: "fixes in 8.12 or later" + version: all + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102871" + # version: " - 8.11.99" + # reason: "fixes in 8.12 or later" features: warnings - do: bulk: From 70497dcb98be59ebd552ff0a0bf75287c063b743 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 1 Dec 2023 07:21:00 -0800 Subject: [PATCH 143/263] AwaitsFix for #102873 --- .../resources/rest-api-spec/test/data_stream/10_basic.yml | 6 ++++-- .../test/data_stream/30_auto_create_data_stream.yml | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 6496930764ab8..b1e0cf8ed7d90 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -210,8 +210,10 @@ setup: --- "Create data stream with failure store": - skip: - version: " - 8.10.99" - reason: "data stream failure stores only creatable in 8.11+" + version: all + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102873" +# version: " - 8.10.99" +# reason: "data stream failure stores only creatable in 8.11+" - do: allowed_warnings: diff --git 
a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml index 303a584555f8f..a7d8476ee2dcf 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml @@ -50,8 +50,10 @@ --- "Put index template with failure store": - skip: - version: " - 8.10.99" - reason: "data stream failure stores only creatable in 8.11+" + version: all + reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/102873" +# version: " - 8.10.99" +# reason: "data stream failure stores only creatable in 8.11+" features: allowed_warnings - do: From c335a988c7a177d5200c9913193831a93281d7fa Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 1 Dec 2023 07:32:45 -0800 Subject: [PATCH 144/263] AwaitsFix for #102863 --- .../elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index d4db20faf0050..67e72d530e2e0 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -27,6 +27,7 @@ public class SpatialCoordinateTypesTests extends ESTestCase { record TestTypeFunctions(Supplier randomPoint, Function error) {} + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102863") public void testEncoding() { for (var type : types.entrySet()) { for (int i = 0; i < 10; i++) { From 06f02d83a724af58b5ac58800d2900d60f1acaf7 Mon Sep 17 00:00:00 2001 
From: David Turner Date: Fri, 1 Dec 2023 07:40:57 -0800 Subject: [PATCH 145/263] AwaitsFix for #102876 --- .../org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java index e794b5afb258c..80d5a3ad71136 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java @@ -23,6 +23,7 @@ public void testEmpty() { assertThat(kde.data(), equalTo(new double[0])); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102876") public void testCdfAndSf() { double[] data = DoubleStream.generate(() -> randomDoubleBetween(0.0, 100.0, true)).limit(101).toArray(); From 83cc25cb715baedc8fb81dcbabcb3978264da5a8 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 1 Dec 2023 17:08:23 +0100 Subject: [PATCH 146/263] Skip global ordinals loading if query does not match after rewrite (#102844) This avoids these stacktraces in case the main query has been rewritten to `MatchNoneQueryBuilder`: ``` org.apache.lucene.util.packed.PackedInts.getMutable(PackedInts.java) org.apache.lucene.util.packed.PackedInts.getMutable(PackedInts.java:706) org.apache.lucene.util.packed.PackedLongValues$Builder.pack(PackedLongValues.java:283) org.apache.lucene.util.packed.PackedLongValues$Builder.pack(PackedLongValues.java:260) org.apache.lucene.util.packed.PackedLongValues$Builder.add(PackedLongValues.java:243) org.apache.lucene.index.OrdinalMap.(OrdinalMap.java:295) org.apache.lucene.index.OrdinalMap.build(OrdinalMap.java:182) org.apache.lucene.index.OrdinalMap.build(OrdinalMap.java:160) org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder.build(GlobalOrdinalsBuilder.java:53) 
org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData.loadGlobalDirect(AbstractIndexOrdinalsFieldData.java:139) org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData.loadGlobalDirect(AbstractIndexOrdinalsFieldData.java:32) org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache$IndexFieldCache.lambda$load$1(IndicesFieldDataCache.java:164) org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache$IndexFieldCache$$Lambda+0x000000f002931690.load() org.elasticsearch.common.cache.Cache.computeIfAbsent(Cache.java:418) org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache$IndexFieldCache.load(IndicesFieldDataCache.java:161) org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData.loadGlobalInternal(AbstractIndexOrdinalsFieldData.java:127) org.elasticsearch.index.fielddata.plain.AbstractIndexOrdinalsFieldData.loadGlobal(AbstractIndexOrdinalsFieldData.java:93) org.elasticsearch.search.aggregations.support.ValuesSource$Bytes$WithOrdinals$FieldData.globalOrdinalsValues(ValuesSource.java:283) org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.globalOrdsValues(TermsAggregatorFactory.java:590) org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory$ExecutionMode$2.create(SignificantTermsAggregatorFactory.java:368) org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory.lambda$bytesSupplier$0(SignificantTermsAggregatorFactory.java:101) org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory$$Lambda+0x000000f001a7e878.build() org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory.doCreateInternal(SignificantTermsAggregatorFactory.java:279) org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory.createInternal(ValuesSourceAggregatorFactory.java:41) org.elasticsearch.search.aggregations.AggregatorFactory.create(AggregatorFactory.java:80) 
org.elasticsearch.search.aggregations.AggregatorFactories.createSubAggregators(AggregatorFactories.java:233) org.elasticsearch.search.aggregations.AggregatorFactories.createTopLevelAggregators(AggregatorFactories.java:243) org.elasticsearch.search.aggregations.AggregationPhase.newAggregatorCollector(AggregationPhase.java:51) org.elasticsearch.search.aggregations.AggregationPhase.lambda$preProcess$1(AggregationPhase.java:37) org.elasticsearch.search.aggregations.AggregationPhase$$Lambda+0x000000f00292eeb0.get() org.elasticsearch.search.aggregations.AggregatorCollectorManager.newCollector(AggregatorCollectorManager.java:39) org.elasticsearch.search.aggregations.AggregatorCollectorManager.newCollector(AggregatorCollectorManager.java:21) org.elasticsearch.search.query.QueryPhaseCollectorManager.newCollector(QueryPhaseCollectorManager.java:139) org.elasticsearch.search.internal.ContextIndexSearcher.search(ContextIndexSearcher.java:311) org.elasticsearch.search.query.QueryPhase.addCollectorsAndSearch(QueryPhase.java:205) org.elasticsearch.search.query.QueryPhase.executeQuery(QueryPhase.java:135) org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:63) org.elasticsearch.indices.IndicesService.lambda$loadIntoContext$31(IndicesService.java:1563) org.elasticsearch.indices.IndicesService$$Lambda+0x000000f0028f8ac8.accept() org.elasticsearch.indices.IndicesService.lambda$cacheShardLevelResult$32(IndicesService.java:1629) org.elasticsearch.indices.IndicesService$$Lambda+0x000000f0028f9210.get() org.elasticsearch.indices.IndicesRequestCache$Loader.load(IndicesRequestCache.java:174) org.elasticsearch.indices.IndicesRequestCache$Loader.load(IndicesRequestCache.java:157) org.elasticsearch.common.cache.Cache.computeIfAbsent(Cache.java:418) org.elasticsearch.indices.IndicesRequestCache.getOrCompute(IndicesRequestCache.java:120) org.elasticsearch.indices.IndicesService.cacheShardLevelResult(IndicesService.java:1635) 
org.elasticsearch.indices.IndicesService.loadIntoContext(IndicesService.java:1557) org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:516) org.elasticsearch.search.SearchService.executeQueryPhase(SearchService.java:671) org.elasticsearch.search.SearchService.lambda$executeQueryPhase$2(SearchService.java:543) org.elasticsearch.search.SearchService$$Lambda+0x000000f0028f6410.get() org.elasticsearch.action.ActionRunnable$2.accept(ActionRunnable.java:51) org.elasticsearch.action.ActionRunnable$2.accept(ActionRunnable.java:48) org.elasticsearch.action.ActionRunnable$3.doRun(ActionRunnable.java:73) org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) org.elasticsearch.common.util.concurrent.TimedRunnable.doRun(TimedRunnable.java:33) org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:983) org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) java.lang.Thread.runWith(Thread.java:1596) java.lang.Thread.run(Thread.java:1583) ``` --- docs/changelog/102844.yaml | 5 +++ .../elasticsearch/search/SearchService.java | 2 ++ .../SignificantTermsAggregatorFactory.java | 33 +++++++++++++++++-- .../bucket/terms/TermsAggregatorFactory.java | 4 ++- .../SignificantTermsAggregatorTests.java | 24 ++++++++++++++ .../bucket/terms/TermsAggregatorTests.java | 23 ++++++++++++- 6 files changed, 86 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/102844.yaml diff --git a/docs/changelog/102844.yaml b/docs/changelog/102844.yaml new file mode 100644 index 0000000000000..d05547c3aa9da --- /dev/null +++ b/docs/changelog/102844.yaml @@ -0,0 +1,5 @@ +pr: 102844 +summary: Skip global ordinals loading if query does not match after rewrite +area: Aggregations +type: bug 
+issues: [] diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 548e3fea9d91c..9e59bfda96d19 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -540,6 +540,8 @@ public void executeQueryPhase(ShardSearchRequest request, SearchShardTask task, return; } } + // TODO: i think it makes sense to always do a canMatch here and + // return an empty response (not null response) in case canMatch is false? ensureAfterSeqNoRefreshed(shard, orig, () -> executeQueryPhase(orig, task), l); })); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index 2cadbd3d43494..f47e28bbc6dbd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -9,7 +9,10 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.MatchNoDocsQuery; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.query.QueryBuilder; @@ -23,6 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.BucketUtils; +import 
org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.elasticsearch.search.aggregations.support.AggregationContext; @@ -32,6 +36,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.xcontent.ParseField; import java.io.IOException; @@ -81,7 +86,7 @@ private static SignificantTermsAggregatorSupplier bytesSupplier() { if (executionHint != null) { execution = ExecutionMode.fromString(executionHint, deprecationLogger); } - if (valuesSourceConfig.hasOrdinals() == false) { + if (valuesSourceConfig.hasOrdinals() == false || matchNoDocs(context, parent)) { execution = ExecutionMode.MAP; } if (execution == null) { @@ -115,6 +120,30 @@ private static SignificantTermsAggregatorSupplier bytesSupplier() { }; } + /** + * Whether the aggregation will execute. If the main query matches no documents and parent aggregation isn't a global or terms + * aggregation with min_doc_count = 0, the the aggregator will not really execute. In those cases it doesn't make sense to load + * global ordinals. + *

    + * Some searches that will never match can still fall through and we endup running query that will produce no results. + * However even in that case we sometimes do expensive things like loading global ordinals. This method should prevent this. + * Note that if {@link org.elasticsearch.search.SearchService#executeQueryPhase(ShardSearchRequest, SearchShardTask, ActionListener)} + * always do a can match then we don't need this code here. + */ + static boolean matchNoDocs(AggregationContext context, Aggregator parent) { + if (context.query() instanceof MatchNoDocsQuery) { + while (parent != null) { + if (parent instanceof GlobalAggregator) { + return false; + } + parent = parent.parent(); + } + return true; + } else { + return false; + } + } + /** * This supplier is used for all fields that expect to be aggregated as a numeric value. * This includes floating points, and formatted types that use numerics internally for storage (date, boolean, etc) @@ -296,7 +325,6 @@ protected Aggregator doCreateInternal(Aggregator parent, CardinalityUpperBound c public enum ExecutionMode { MAP(new ParseField("map")) { - @Override Aggregator create( String name, @@ -335,7 +363,6 @@ Aggregator create( }, GLOBAL_ORDINALS(new ParseField("global_ordinals")) { - @Override Aggregator create( String name, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index e17cd828a24d0..68a4ffca22b51 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -45,6 +45,8 @@ import java.util.function.Function; import java.util.function.LongPredicate; +import static org.elasticsearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory.matchNoDocs; + public class 
TermsAggregatorFactory extends ValuesSourceAggregatorFactory { static Boolean REMAP_GLOBAL_ORDS, COLLECT_SEGMENT_ORDS; @@ -107,7 +109,7 @@ private static TermsAggregatorSupplier bytesSupplier() { execution = ExecutionMode.fromString(executionHint); } // In some cases, using ordinals is just not supported: override it - if (valuesSource.hasOrdinals() == false) { + if (valuesSource.hasOrdinals() == false || matchNoDocs(context, parent)) { execution = ExecutionMode.MAP; } if (execution == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java index 5c467893179ee..87d4137b5bc59 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java @@ -20,12 +20,15 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.BinaryFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -60,6 +63,7 @@ import java.util.TreeSet; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; +import static org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorTests.doc; import 
static org.hamcrest.Matchers.equalTo; public class SignificantTermsAggregatorTests extends AggregatorTestCase { @@ -668,6 +672,26 @@ public void testThreeLayerLong() throws IOException { } } + public void testMatchNoDocsQuery() throws Exception { + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); + SignificantTermsAggregationBuilder aggregationBuilder = new SignificantTermsAggregationBuilder("_name").field("string"); + CheckedConsumer createIndex = iw -> { + iw.addDocument(doc(fieldType, "a", "b")); + iw.addDocument(doc(fieldType, "", "c", "a")); + iw.addDocument(doc(fieldType, "b", "d")); + iw.addDocument(doc(fieldType, "")); + }; + testCase( + createIndex, + (SignificantStringTerms result) -> { assertEquals(0, result.getBuckets().size()); }, + new AggTestConfig(aggregationBuilder, fieldType).withQuery(new MatchNoDocsQuery()) + ); + + debugTestCase(aggregationBuilder, new MatchNoDocsQuery(), createIndex, (result, impl, debug) -> { + assertEquals(impl, MapStringTermsAggregator.class); + }, fieldType); + } + private void addMixedTextDocs(IndexWriter w) throws IOException { for (int i = 0; i < 10; i++) { Document doc = new Document(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index b0d67879b26a1..204e9025ce9a2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.search.TotalHits; @@ -285,6 +286,26 @@ public void testSimple() throws Exception { }, new AggTestConfig(aggregationBuilder, fieldType)); } + public void testMatchNoDocsQuery() throws Exception { + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", randomBoolean(), true, Collections.emptyMap()); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name").field("string"); + CheckedConsumer createIndex = iw -> { + iw.addDocument(doc(fieldType, "a", "b")); + iw.addDocument(doc(fieldType, "", "c", "a")); + iw.addDocument(doc(fieldType, "b", "d")); + iw.addDocument(doc(fieldType, "")); + }; + testCase( + createIndex, + (InternalTerms result) -> { assertEquals(0, result.getBuckets().size()); }, + new AggTestConfig(aggregationBuilder, fieldType).withQuery(new MatchNoDocsQuery()) + ); + + debugTestCase(aggregationBuilder, new MatchNoDocsQuery(), createIndex, (result, impl, debug) -> { + assertEquals(impl, MapStringTermsAggregator.class); + }, fieldType); + } + public void testStringShardMinDocCount() throws IOException { MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType("string", true, true, Collections.emptyMap()); for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { @@ -419,7 +440,7 @@ public void testDelaysSubAggs() throws Exception { }); } - private List doc(MappedFieldType ft, String... values) { + static List doc(MappedFieldType ft, String... values) { List doc = new ArrayList(); for (String v : values) { BytesRef bytes = new BytesRef(v); From 7b3fcc02941f4b971259e61aa7c87bda7b52a54e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 1 Dec 2023 11:10:39 -0500 Subject: [PATCH 147/263] ESQL: Add `profile` option (#102713) This adds a `profile` option to ESQL queries which I think will be useful to debug performance issues with ESQL. 
It looks like: ``` "profile" : { "drivers" : [ { "operators" : [ { "operator" : "ExchangeSourceOperator", "status" : { "pages_waiting" : 1, "pages_emitted" : 1 } }, { "operator" : "LimitOperator[limit = 0/10]", "status" : { "limit" : 10, "limit_remaining" : 0, "pages_processed" : 1 } }, { "operator" : "OutputOperator[columns = [3092 columns]]" } ] }, { "operators" : [ { "operator" : "LuceneSourceOperator[maxPageSize=10, remainingDocs=0]", "status" : { "processed_slices" : 1, "slice_index" : 1, "total_slices" : 11, "pages_emitted" : 1, "slice_min" : 0, "slice_max" : 3740, "current" : 10 } }, { ``` We should consider everything that comes out of the `profile` options as debug information. Clients shouldn't parse it - or, if they do, it's just a free form json object without any consistency between requests. This is just for debugging information. --- docs/changelog/102713.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xcontent/ChunkedToXContentHelper.java | 22 +++- .../operator/AbstractPageMappingOperator.java | 2 +- .../compute/operator/Driver.java | 12 +- .../compute/operator/DriverProfile.java | 74 ++++++++++++ .../compute/operator/DriverStatus.java | 54 +++++++-- .../compute/operator/DriverProfileTests.java | 70 +++++++++++ .../compute/operator/DriverStatusTests.java | 52 ++++---- .../ExchangeSinkOperatorStatusTests.java | 14 ++- .../xpack/esql/EsqlTestUtils.java | 3 +- .../xpack/esql/action/EsqlQueryRequest.java | 18 +++ .../xpack/esql/action/EsqlQueryResponse.java | 111 +++++++++++++++--- .../xpack/esql/plugin/ComputeService.java | 87 +++++++++++--- .../esql/plugin/TransportEsqlQueryAction.java | 10 +- .../xpack/esql/session/EsqlConfiguration.java | 28 ++++- .../action/EsqlQueryResponseProfileTests.java | 58 +++++++++ .../esql/action/EsqlQueryResponseTests.java | 66 +++++++++-- .../xpack/esql/formatter/TextFormatTests.java | 8 +- .../esql/formatter/TextFormatterTests.java | 3 + .../xpack/esql/planner/EvalMapperTests.java | 3 +- 
.../planner/LocalExecutionPlannerTests.java | 3 +- .../EsqlConfigurationSerializationTests.java | 18 ++- 23 files changed, 621 insertions(+), 101 deletions(-) create mode 100644 docs/changelog/102713.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java diff --git a/docs/changelog/102713.yaml b/docs/changelog/102713.yaml new file mode 100644 index 0000000000000..278d7d4ffb129 --- /dev/null +++ b/docs/changelog/102713.yaml @@ -0,0 +1,5 @@ +pr: 102713 +summary: "ESQL: Add `profile` option" +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 44f98305d2997..4627a3d907133 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -181,6 +181,7 @@ static TransportVersion def(int id) { public static final TransportVersion GET_API_KEY_INVALIDATION_TIME_ADDED = def(8_548_00_0); public static final TransportVersion ML_INFERENCE_GET_MULTIPLE_MODELS = def(8_549_00_0); public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); + public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 5a49896cf1a36..4eaf9b5636623 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -75,11 +75,11 @@ public static Iterator field(String name, String value) { } /** - * Creates an Iterator to serialize a named field where the value is represented by a chunked ToXContext. + * Creates an Iterator to serialize a named field where the value is represented by a {@link ChunkedToXContentObject}. * Chunked equivalent for {@code XContentBuilder field(String name, ToXContent value)} * @param name name of the field - * @param value ChunkedToXContent value for this field (single value, object or array) - * @param params ToXContent params to propagate for XContent serialization + * @param value value for this field + * @param params params to propagate for XContent serialization * @return Iterator composing field name and value serialization */ public static Iterator field(String name, ChunkedToXContentObject value, ToXContent.Params params) { @@ -90,6 +90,22 @@ public static Iterator array(String name, Iterator array(String name, Iterator contents, ToXContent.Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startArray(name), + Iterators.flatMap(contents, c -> c.toXContentChunked(params)), + ChunkedToXContentHelper.endArray() + ); + } + public static Iterator wrapWithObject(String name, Iterator iterator) { return Iterators.concat(startObject(name), iterator, endObject()); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java index ca4dbccb5b442..5924e4086c743 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AbstractPageMappingOperator.java @@ -95,7 +95,7 @@ public static class Status implements Operator.Status { private final int pagesProcessed; - protected Status(int pagesProcessed) { + public Status(int pagesProcessed) { this.pagesProcessed = pagesProcessed; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java index 176b2bda31e3e..3e9793ef87b2a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/Driver.java @@ -52,6 +52,7 @@ public class Driver implements Releasable, Describable { private final DriverContext driverContext; private final Supplier description; private final List activeOperators; + private final List statusOfCompletedOperators = new ArrayList<>(); private final Releasable releasable; private final long statusNanos; @@ -97,7 +98,9 @@ public Driver( this.activeOperators.add(sink); this.statusNanos = statusInterval.nanos(); this.releasable = releasable; - this.status = new AtomicReference<>(new DriverStatus(sessionId, System.currentTimeMillis(), DriverStatus.Status.QUEUED, List.of())); + this.status = new AtomicReference<>( + new DriverStatus(sessionId, System.currentTimeMillis(), DriverStatus.Status.QUEUED, List.of(), List.of()) + ); } /** @@ -229,7 +232,9 @@ private SubscribableListener runSingleLoopIteration() { List finishedOperators = this.activeOperators.subList(0, index + 1); Iterator itr = finishedOperators.iterator(); while 
(itr.hasNext()) { - itr.next().close(); + Operator op = itr.next(); + statusOfCompletedOperators.add(new DriverStatus.OperatorStatus(op.toString(), op.status())); + op.close(); itr.remove(); } @@ -394,7 +399,8 @@ private DriverStatus updateStatus(DriverStatus.Status status) { sessionId, System.currentTimeMillis(), status, - activeOperators.stream().map(o -> new DriverStatus.OperatorStatus(o.toString(), o.status())).toList() + statusOfCompletedOperators, + activeOperators.stream().map(op -> new DriverStatus.OperatorStatus(op.toString(), op.status())).toList() ); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java new file mode 100644 index 0000000000000..d82ddc1899b1c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverProfile.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.xcontent.ToXContent; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * Profile results from a single {@link Driver}. + */ +public class DriverProfile implements Writeable, ChunkedToXContentObject { + /** + * Status of each {@link Operator} in the driver when it finishes. 
+ */ + private final List operators; + + public DriverProfile(List operators) { + this.operators = operators; + } + + public DriverProfile(StreamInput in) throws IOException { + this.operators = in.readCollectionAsImmutableList(DriverStatus.OperatorStatus::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(operators); + } + + List operators() { + return operators; + } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.array("operators", operators.iterator()), + ChunkedToXContentHelper.endObject() + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DriverProfile that = (DriverProfile) o; + return Objects.equals(operators, that.operators); + } + + @Override + public int hashCode() { + return Objects.hash(operators); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java index b3326e395def2..5a6265b37e3c6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverStatus.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -46,20 +47,41 @@ public class DriverStatus implements Task.Status { * The state of the overall driver - queue, starting, running, finished. */ private final Status status; + + /** + * Status of each completed {@link Operator} in the driver. 
+ */ + private final List completedOperators; + /** - * Status of each {@link Operator} in the driver. + * Status of each active {@link Operator} in the driver. */ private final List activeOperators; - DriverStatus(String sessionId, long lastUpdated, Status status, List activeOperators) { + DriverStatus( + String sessionId, + long lastUpdated, + Status status, + List completedOperators, + List activeOperators + ) { this.sessionId = sessionId; this.lastUpdated = lastUpdated; this.status = status; + this.completedOperators = completedOperators; this.activeOperators = activeOperators; } - DriverStatus(StreamInput in) throws IOException { - this(in.readString(), in.readLong(), Status.valueOf(in.readString()), in.readCollectionAsImmutableList(OperatorStatus::new)); + public DriverStatus(StreamInput in) throws IOException { + this.sessionId = in.readString(); + this.lastUpdated = in.readLong(); + this.status = Status.valueOf(in.readString()); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + this.completedOperators = in.readCollectionAsImmutableList(OperatorStatus::new); + } else { + this.completedOperators = List.of(); + } + this.activeOperators = in.readCollectionAsImmutableList(OperatorStatus::new); } @Override @@ -67,6 +89,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(sessionId); out.writeLong(lastUpdated); out.writeString(status.toString()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + out.writeCollection(completedOperators); + } out.writeCollection(activeOperators); } @@ -97,7 +122,14 @@ public Status status() { } /** - * Status of each {@link Operator} in the driver. + * Status of each completed {@link Operator} in the driver. + */ + public List completedOperators() { + return completedOperators; + } + + /** + * Status of each active {@link Operator} in the driver. 
*/ public List activeOperators() { return activeOperators; @@ -109,6 +141,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("sessionId", sessionId); builder.field("last_updated", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(lastUpdated)); builder.field("status", status.toString().toLowerCase(Locale.ROOT)); + builder.startArray("completed_operators"); + for (OperatorStatus completed : completedOperators) { + builder.value(completed); + } + builder.endArray(); builder.startArray("active_operators"); for (OperatorStatus active : activeOperators) { builder.value(active); @@ -125,12 +162,13 @@ public boolean equals(Object o) { return sessionId.equals(that.sessionId) && lastUpdated == that.lastUpdated && status == that.status + && completedOperators.equals(that.completedOperators) && activeOperators.equals(that.activeOperators); } @Override public int hashCode() { - return Objects.hash(sessionId, lastUpdated, status, activeOperators); + return Objects.hash(sessionId, lastUpdated, status, completedOperators, activeOperators); } @Override @@ -153,12 +191,12 @@ public static class OperatorStatus implements Writeable, ToXContentObject { @Nullable private final Operator.Status status; - OperatorStatus(String operator, Operator.Status status) { + public OperatorStatus(String operator, Operator.Status status) { this.operator = operator; this.status = status; } - private OperatorStatus(StreamInput in) throws IOException { + OperatorStatus(StreamInput in) throws IOException { operator = in.readString(); status = in.readOptionalNamedWriteable(Operator.Status.class); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java new file mode 100644 index 0000000000000..f6b4fbc817940 --- /dev/null +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.compute.lucene.LuceneSourceOperator; +import org.elasticsearch.compute.lucene.LuceneSourceOperatorStatusTests; +import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; +import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorStatusTests; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class DriverProfileTests extends AbstractWireSerializingTestCase { + public void testToXContent() { + DriverProfile status = new DriverProfile( + List.of( + new DriverStatus.OperatorStatus("LuceneSource", LuceneSourceOperatorStatusTests.simple()), + new DriverStatus.OperatorStatus("ValuesSourceReader", ValuesSourceReaderOperatorStatusTests.simple()) + ) + ); + assertThat( + Strings.toString(status), + equalTo( + """ + {"operators":[""" + + """ + {"operator":"LuceneSource","status":""" + + LuceneSourceOperatorStatusTests.simpleToJson() + + "},{\"operator\":\"ValuesSourceReader\",\"status\":" + + ValuesSourceReaderOperatorStatusTests.simpleToJson() + + "}]}" + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return DriverProfile::new; + } + + @Override + protected DriverProfile createTestInstance() { + return new 
DriverProfile(DriverStatusTests.randomOperatorStatuses()); + } + + @Override + protected DriverProfile mutateInstance(DriverProfile instance) throws IOException { + var operators = randomValueOtherThan(instance.operators(), DriverStatusTests::randomOperatorStatuses); + return new DriverProfile(operators); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry( + List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperator.Status.ENTRY, ExchangeSinkOperator.Status.ENTRY) + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index 775c30223589b..cdae4283540c4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -16,6 +16,8 @@ import org.elasticsearch.compute.lucene.LuceneSourceOperatorStatusTests; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorStatusTests; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperatorStatusTests; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; @@ -34,21 +36,18 @@ public void testToXContent() { List.of( new DriverStatus.OperatorStatus("LuceneSource", LuceneSourceOperatorStatusTests.simple()), new DriverStatus.OperatorStatus("ValuesSourceReader", ValuesSourceReaderOperatorStatusTests.simple()) - ) - ); - assertThat( - Strings.toString(status), - equalTo( - """ - {"sessionId":"ABC:123","last_updated":"1973-11-29T09:27:23.214Z","status":"running","active_operators":[""" - + """ - 
{"operator":"LuceneSource","status":""" - + LuceneSourceOperatorStatusTests.simpleToJson() - + "},{\"operator\":\"ValuesSourceReader\",\"status\":" - + ValuesSourceReaderOperatorStatusTests.simpleToJson() - + "}]}" - ) + ), + List.of(new DriverStatus.OperatorStatus("ExchangeSink", ExchangeSinkOperatorStatusTests.simple())) ); + assertThat(Strings.toString(status), equalTo(""" + {"sessionId":"ABC:123","last_updated":"1973-11-29T09:27:23.214Z","status":"running", + """.trim() + """ + "completed_operators":[{"operator":"LuceneSource","status": + """.trim() + LuceneSourceOperatorStatusTests.simpleToJson() + """ + },{"operator":"ValuesSourceReader","status": + """.trim() + ValuesSourceReaderOperatorStatusTests.simpleToJson() + """ + }],"active_operators":[{"operator":"ExchangeSink","status": + """.trim() + ExchangeSinkOperatorStatusTests.simpleToJson() + "}]}")); } @Override @@ -58,7 +57,7 @@ protected Writeable.Reader instanceReader() { @Override protected DriverStatus createTestInstance() { - return new DriverStatus(randomSessionId(), randomLong(), randomStatus(), randomActiveOperators()); + return new DriverStatus(randomSessionId(), randomLong(), randomStatus(), randomOperatorStatuses(), randomOperatorStatuses()); } private String randomSessionId() { @@ -69,14 +68,15 @@ private DriverStatus.Status randomStatus() { return randomFrom(DriverStatus.Status.values()); } - private List randomActiveOperators() { - return randomList(0, 5, this::randomOperatorStatus); + static List randomOperatorStatuses() { + return randomList(0, 5, DriverStatusTests::randomOperatorStatus); } - private DriverStatus.OperatorStatus randomOperatorStatus() { + private static DriverStatus.OperatorStatus randomOperatorStatus() { Supplier status = randomFrom( new LuceneSourceOperatorStatusTests()::createTestInstance, new ValuesSourceReaderOperatorStatusTests()::createTestInstance, + new ExchangeSinkOperatorStatusTests()::createTestInstance, () -> null ); return new 
DriverStatus.OperatorStatus(randomAlphaOfLength(3), status.get()); @@ -87,8 +87,9 @@ protected DriverStatus mutateInstance(DriverStatus instance) throws IOException var sessionId = instance.sessionId(); long lastUpdated = instance.lastUpdated(); var status = instance.status(); - var operators = instance.activeOperators(); - switch (between(0, 3)) { + var completedOperators = instance.completedOperators(); + var activeOperators = instance.activeOperators(); + switch (between(0, 4)) { case 0: sessionId = randomValueOtherThan(sessionId, this::randomSessionId); break; @@ -99,16 +100,21 @@ protected DriverStatus mutateInstance(DriverStatus instance) throws IOException status = randomValueOtherThan(status, this::randomStatus); break; case 3: - operators = randomValueOtherThan(operators, this::randomActiveOperators); + completedOperators = randomValueOtherThan(completedOperators, DriverStatusTests::randomOperatorStatuses); + break; + case 4: + activeOperators = randomValueOtherThan(activeOperators, DriverStatusTests::randomOperatorStatuses); break; default: throw new UnsupportedOperationException(); } - return new DriverStatus(sessionId, lastUpdated, status, operators); + return new DriverStatus(sessionId, lastUpdated, status, completedOperators, activeOperators); } @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperator.Status.ENTRY)); + return new NamedWriteableRegistry( + List.of(LuceneSourceOperator.Status.ENTRY, ValuesSourceReaderOperator.Status.ENTRY, ExchangeSinkOperator.Status.ENTRY) + ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java index f342720b99903..7438055284b14 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java @@ -17,8 +17,16 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTestCase { public void testToXContent() { - assertThat(Strings.toString(new ExchangeSinkOperator.Status(10)), equalTo(""" - {"pages_accepted":10}""")); + assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + } + + public static ExchangeSinkOperator.Status simple() { + return new ExchangeSinkOperator.Status(10); + } + + public static String simpleToJson() { + return """ + {"pages_accepted":10}"""; } @Override @@ -27,7 +35,7 @@ protected Writeable.Reader instanceReader() { } @Override - protected ExchangeSinkOperator.Status createTestInstance() { + public ExchangeSinkOperator.Status createTestInstance() { return new ExchangeSinkOperator.Status(between(0, Integer.MAX_VALUE)); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index d35f7898d937f..eca8beb06576b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -105,7 +105,8 @@ public static EsqlConfiguration configuration(QueryPragmas pragmas, String query pragmas, EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE.getDefault(Settings.EMPTY), EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE.getDefault(Settings.EMPTY), - query + query, + false ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index 
c467f0dfc9075..0de89a4d8de2a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -62,11 +62,13 @@ public class EsqlQueryRequest extends ActionRequest implements CompositeIndicesR private static final ParseField PRAGMA_FIELD = new ParseField("pragma"); private static final ParseField PARAMS_FIELD = new ParseField("params"); private static final ParseField LOCALE_FIELD = new ParseField("locale"); + private static final ParseField PROFILE_FIELD = new ParseField("profile"); private static final ObjectParser PARSER = objectParser(EsqlQueryRequest::new); private String query; private boolean columnar; + private boolean profile; private Locale locale; private QueryBuilder filter; private QueryPragmas pragmas = new QueryPragmas(Settings.EMPTY); @@ -106,6 +108,21 @@ public boolean columnar() { return columnar; } + /** + * Enable profiling, sacrificing performance to return information about + * what operations are taking the most time. + */ + public void profile(boolean profile) { + this.profile = profile; + } + + /** + * Is profiling enabled? 
+ */ + public boolean profile() { + return profile; + } + public void locale(Locale locale) { this.locale = locale; } @@ -154,6 +171,7 @@ private static ObjectParser objectParser(Supplier request.locale(Locale.forLanguageTag(localeTag)), LOCALE_FIELD); + parser.declareBoolean(EsqlQueryRequest::profile, PROFILE_FIELD); return parser; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index fea9dd6c526c3..b283231574540 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -8,15 +8,18 @@ package org.elasticsearch.xpack.esql.action; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -28,6 +31,8 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.UnsupportedValueSource; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; 
import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; @@ -62,12 +67,7 @@ import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; -public class EsqlQueryResponse extends ActionResponse implements ChunkedToXContent, Releasable { - - private final List columns; - private final List pages; - private final boolean columnar; - +public class EsqlQueryResponse extends ActionResponse implements ChunkedToXContentObject, Releasable { private static final InstantiatingObjectParser PARSER; static { InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( @@ -80,15 +80,22 @@ public class EsqlQueryResponse extends ActionResponse implements ChunkedToXConte PARSER = parser.build(); } - public EsqlQueryResponse(List columns, List pages, boolean columnar) { + private final List columns; + private final List pages; + private final Profile profile; + private final boolean columnar; + + public EsqlQueryResponse(List columns, List pages, @Nullable Profile profile, boolean columnar) { this.columns = columns; this.pages = pages; + this.profile = profile; this.columnar = columnar; } public EsqlQueryResponse(List columns, List> values) { this.columns = columns; this.pages = List.of(valuesToPage(columns.stream().map(ColumnInfo::type).toList(), values)); + this.profile = null; this.columnar = false; } @@ -99,10 +106,15 @@ public static Writeable.Reader reader(BlockFactory blockFacto return in -> new EsqlQueryResponse(new BlockStreamInput(in, blockFactory)); } - public EsqlQueryResponse(BlockStreamInput in) throws IOException { + private EsqlQueryResponse(BlockStreamInput in) throws IOException { super(in); this.columns = in.readCollectionAsList(ColumnInfo::new); this.pages = in.readCollectionAsList(Page::new); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + this.profile = in.readOptionalWriteable(Profile::new); + } else { + 
this.profile = null; + } this.columnar = in.readBoolean(); } @@ -110,6 +122,9 @@ public EsqlQueryResponse(BlockStreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeCollection(columns); out.writeCollection(pages); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + out.writeOptionalWriteable(profile); + } out.writeBoolean(columnar); } @@ -125,12 +140,16 @@ public Iterator> values() { return pagesToValues(columns.stream().map(ColumnInfo::type).toList(), pages); } + public Profile profile() { + return profile; + } + public boolean columnar() { return columnar; } @Override - public Iterator toXContentChunked(ToXContent.Params unused) { + public Iterator toXContentChunked(ToXContent.Params params) { final BytesRef scratch = new BytesRef(); final Iterator valuesIt; if (pages.isEmpty()) { @@ -141,14 +160,14 @@ public Iterator toXContentChunked(ToXContent.Params unused 0, columns().size(), column -> Iterators.concat( - Iterators.single(((builder, params) -> builder.startArray())), + Iterators.single(((builder, p) -> builder.startArray())), Iterators.flatMap(pages.iterator(), page -> { ColumnInfo.PositionToXContent toXContent = columns.get(column) .positionToXContent(page.getBlock(column), scratch); return Iterators.forRange( 0, page.getPositionCount(), - position -> (builder, params) -> toXContent.positionToXContent(builder, params, position) + position -> (builder, p) -> toXContent.positionToXContent(builder, p, position) ); }), ChunkedToXContentHelper.endArray() @@ -164,22 +183,32 @@ public Iterator toXContentChunked(ToXContent.Params unused for (int column = 0; column < columnCount; column++) { toXContents[column] = columns.get(column).positionToXContent(page.getBlock(column), scratch); } - return Iterators.forRange(0, page.getPositionCount(), position -> (builder, params) -> { + return Iterators.forRange(0, page.getPositionCount(), position -> (builder, p) -> { builder.startArray(); for (int 
c = 0; c < columnCount; c++) { - toXContents[c].positionToXContent(builder, params, position); + toXContents[c].positionToXContent(builder, p, position); } return builder.endArray(); }); }); } - return Iterators.concat(ChunkedToXContentHelper.startObject(), ChunkedToXContentHelper.singleChunk((builder, params) -> { + Iterator columnsRender = ChunkedToXContentHelper.singleChunk((builder, p) -> { builder.startArray("columns"); for (ColumnInfo col : columns) { - col.toXContent(builder, params); + col.toXContent(builder, p); } return builder.endArray(); - }), ChunkedToXContentHelper.array("values", valuesIt), ChunkedToXContentHelper.endObject()); + }); + Iterator profileRender = profile == null + ? List.of().iterator() + : ChunkedToXContentHelper.field("profile", profile, params); + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + columnsRender, + ChunkedToXContentHelper.array("values", valuesIt), + profileRender, + ChunkedToXContentHelper.endObject() + ); } @Override @@ -198,7 +227,8 @@ public boolean equals(Object o) { EsqlQueryResponse that = (EsqlQueryResponse) o; return Objects.equals(columns, that.columns) && columnar == that.columnar - && Iterators.equals(values(), that.values(), (row1, row2) -> Iterators.equals(row1, row2, Objects::equals)); + && Iterators.equals(values(), that.values(), (row1, row2) -> Iterators.equals(row1, row2, Objects::equals)) + && Objects.equals(profile, that.profile); } @Override @@ -336,4 +366,51 @@ private static Page valuesToPage(List dataTypes, List> valu } return new Page(results.stream().map(Block.Builder::build).toArray(Block[]::new)); } + + public static class Profile implements Writeable, ChunkedToXContentObject { + private final List drivers; + + public Profile(List drivers) { + this.drivers = drivers; + } + + public Profile(StreamInput in) throws IOException { + this.drivers = in.readCollectionAsImmutableList(DriverProfile::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { 
+ out.writeCollection(drivers); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Profile profile = (Profile) o; + return Objects.equals(drivers, profile.drivers); + } + + @Override + public int hashCode() { + return Objects.hash(drivers); + } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + return Iterators.concat( + ChunkedToXContentHelper.startObject(), + ChunkedToXContentHelper.array("drivers", drivers.iterator(), params), + ChunkedToXContentHelper.endObject() + ); + } + + List drivers() { + return drivers; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index c28867f89c981..8d7024f7d889d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.plugin; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.search.SearchRequest; @@ -28,6 +29,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeResponse; import org.elasticsearch.compute.operator.exchange.ExchangeService; @@ -86,6 +88,8 @@ * Computes the result of a {@link PhysicalPlan}. 
*/ public class ComputeService { + public record Result(List pages, List profiles) {} + private static final Logger LOGGER = LogManager.getLogger(ComputeService.class); private final SearchService searchService; private final BigArrays bigArrays; @@ -122,7 +126,7 @@ public void execute( CancellableTask rootTask, PhysicalPlan physicalPlan, EsqlConfiguration configuration, - ActionListener> listener + ActionListener listener ) { Tuple coordinatorAndDataNodePlan = PlannerUtils.breakPlanBetweenCoordinatorAndDataNode( physicalPlan, @@ -142,7 +146,12 @@ public void execute( if (concreteIndices.isEmpty()) { var computeContext = new ComputeContext(sessionId, List.of(), configuration, null, null); - runCompute(rootTask, computeContext, coordinatorPlan, listener.map(unused -> collectedPages)); + runCompute( + rootTask, + computeContext, + coordinatorPlan, + listener.map(driverProfiles -> new Result(collectedPages, driverProfiles)) + ); return; } QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan); @@ -161,18 +170,32 @@ public void execute( queryPragmas.exchangeBufferSize(), ESQL_THREAD_POOL_NAME ); + final List collectedProfiles = configuration.profile() + ? 
Collections.synchronizedList(new ArrayList<>()) + : null; try ( Releasable ignored = exchangeSource::decRef; - RefCountingListener requestRefs = new RefCountingListener(delegate.map(unused -> collectedPages)) + RefCountingListener requestRefs = new RefCountingListener( + delegate.map(unused -> new Result(collectedPages, collectedProfiles)) + ) ) { final AtomicBoolean cancelled = new AtomicBoolean(); // wait until the source handler is completed exchangeSource.addCompletionListener(requestRefs.acquire()); // run compute on the coordinator var computeContext = new ComputeContext(sessionId, List.of(), configuration, exchangeSource, null); - runCompute(rootTask, computeContext, coordinatorPlan, cancelOnFailure(rootTask, cancelled, requestRefs.acquire())); + runCompute( + rootTask, + computeContext, + coordinatorPlan, + cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(driverProfiles -> { + if (configuration.profile()) { + collectedProfiles.addAll(driverProfiles); + } + return null; + }) + ); // run compute on remote nodes - // TODO: This is wrong, we need to be able to cancel runComputeOnRemoteNodes( sessionId, rootTask, @@ -180,7 +203,12 @@ public void execute( dataNodePlan, exchangeSource, targetNodes, - () -> cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(unused -> null) + () -> cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(response -> { + if (configuration.profile()) { + collectedProfiles.addAll(response.profiles); + } + return null; + }) ); } }) @@ -241,7 +269,7 @@ private ActionListener cancelOnFailure(CancellableTask task, AtomicBoolean }); } - void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener listener) { + void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener> listener) { listener = ActionListener.runAfter(listener, () -> Releasables.close(context.searchContexts)); final List drivers; try { @@ -273,11 +301,18 @@ void 
runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, listener.onFailure(e); return; } + ActionListener listenerCollectingStatus = listener.map(ignored -> { + if (context.configuration.profile()) { + return drivers.stream().map(d -> new DriverProfile(d.status().completedOperators())).toList(); + } + return null; + }); + listenerCollectingStatus = ActionListener.releaseAfter(listenerCollectingStatus, () -> Releasables.close(drivers)); driverRunner.executeDrivers( task, drivers, transportService.getThreadPool().executor(ESQL_WORKER_THREAD_POOL_NAME), - ActionListener.releaseAfter(listener, () -> Releasables.close(drivers)) + listenerCollectingStatus ); } @@ -412,17 +447,36 @@ private void computeTargetNodes( } } - // TODO: To include stats/profiles private static class DataNodeResponse extends TransportResponse { - DataNodeResponse() {} + private final List profiles; + + DataNodeResponse(List profiles) { + this.profiles = profiles; + } DataNodeResponse(StreamInput in) throws IOException { super(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + if (in.readBoolean()) { + profiles = in.readCollectionAsImmutableList(DriverProfile::new); + } else { + profiles = null; + } + } else { + profiles = null; + } } @Override - public void writeTo(StreamOutput out) { - + public void writeTo(StreamOutput out) throws IOException { + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + if (profiles == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeCollection(profiles); + } + } } } @@ -436,13 +490,16 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T final var sessionId = request.sessionId(); final var exchangeSink = exchangeService.getSinkHandler(sessionId); parentTask.addListener(() -> exchangeService.finishSinkHandler(sessionId, new TaskCancelledException("task cancelled"))); - final ActionListener listener = new 
OwningChannelActionListener<>(channel).map(nullValue -> new DataNodeResponse()); + final ActionListener listener = new OwningChannelActionListener<>(channel); acquireSearchContexts(request.shardIds(), request.aliasFilters(), ActionListener.wrap(searchContexts -> { var computeContext = new ComputeContext(sessionId, searchContexts, request.configuration(), null, exchangeSink); - runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(unused -> { + runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { // don't return until all pages are fetched exchangeSink.addCompletionListener( - ActionListener.releaseAfter(listener, () -> exchangeService.finishSinkHandler(sessionId, null)) + ActionListener.releaseAfter( + listener.map(nullValue -> new DataNodeResponse(driverProfiles)), + () -> exchangeService.finishSinkHandler(sessionId, null) + ) ); }, e -> { exchangeService.finishSinkHandler(sessionId, e); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index de4af3497d80d..780d812e2c23b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -96,7 +96,8 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener request.pragmas(), clusterService.getClusterSettings().get(EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE), clusterService.getClusterSettings().get(EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE), - request.query() + request.query(), + request.profile() ); String sessionId = sessionID(task); planExecutor.esql( @@ -110,12 +111,15 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener (CancellableTask) task, physicalPlan, configuration, - 
delegate.map(pages -> { + delegate.map(result -> { List columns = physicalPlan.output() .stream() .map(c -> new ColumnInfo(c.qualifiedName(), EsqlDataTypes.outputType(c.dataType()))) .toList(); - return new EsqlQueryResponse(columns, pages, request.columnar()); + EsqlQueryResponse.Profile profile = configuration.profile() + ? new EsqlQueryResponse.Profile(result.profiles()) + : null; + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar()); }) ) ) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java index 7549552dae55b..ac13f25c2d2a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.session; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.StreamInput; @@ -37,6 +38,8 @@ public class EsqlConfiguration extends Configuration implements Writeable { private final String query; + private final boolean profile; + public EsqlConfiguration( ZoneId zi, Locale locale, @@ -45,7 +48,8 @@ public EsqlConfiguration( QueryPragmas pragmas, int resultTruncationMaxSize, int resultTruncationDefaultSize, - String query + String query, + boolean profile ) { super(zi, username, clusterName); this.locale = locale; @@ -53,6 +57,7 @@ public EsqlConfiguration( this.resultTruncationMaxSize = resultTruncationMaxSize; this.resultTruncationDefaultSize = resultTruncationDefaultSize; this.query = query; + this.profile = profile; } public EsqlConfiguration(StreamInput in) throws IOException { @@ -62,6 +67,11 @@ public EsqlConfiguration(StreamInput in) throws IOException { 
this.resultTruncationMaxSize = in.readVInt(); this.resultTruncationDefaultSize = in.readVInt(); this.query = readQuery(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + this.profile = in.readBoolean(); + } else { + this.profile = false; + } } @Override @@ -77,6 +87,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(resultTruncationMaxSize); out.writeVInt(resultTruncationDefaultSize); writeQuery(out, query); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) { + out.writeBoolean(profile); + } } public QueryPragmas pragmas() { @@ -99,6 +112,14 @@ public String query() { return query; } + /** + * Enable profiling, sacrificing performance to return information about + * what operations are taking the most time. + */ + public boolean profile() { + return profile; + } + private static void writeQuery(StreamOutput out, String query) throws IOException { if (query.length() > QUERY_COMPRESS_THRESHOLD_CHARS) { // compare on chars to avoid UTF-8 encoding unless actually required out.writeBoolean(true); @@ -130,13 +151,14 @@ public boolean equals(Object o) { && resultTruncationDefaultSize == that.resultTruncationDefaultSize && Objects.equals(pragmas, that.pragmas) && Objects.equals(locale, that.locale) - && Objects.equals(that.query, query); + && Objects.equals(that.query, query) + && profile == that.profile; } return false; } @Override public int hashCode() { - return Objects.hash(super.hashCode(), pragmas, resultTruncationMaxSize, resultTruncationDefaultSize, locale, query); + return Objects.hash(super.hashCode(), pragmas, resultTruncationMaxSize, resultTruncationDefaultSize, locale, query, profile); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java new file mode 100644 index 0000000000000..af8f6dcd550c4 --- /dev/null 
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.operator.AbstractPageMappingOperator; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.util.List; +import java.util.stream.Stream; + +public class EsqlQueryResponseProfileTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return EsqlQueryResponse.Profile::new; + } + + @Override + protected EsqlQueryResponse.Profile createTestInstance() { + return new EsqlQueryResponse.Profile(randomDriverProfiles()); + } + + @Override + protected EsqlQueryResponse.Profile mutateInstance(EsqlQueryResponse.Profile instance) { + return new EsqlQueryResponse.Profile(randomValueOtherThan(instance.drivers(), this::randomDriverProfiles)); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry( + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + ); + } + + private List randomDriverProfiles() { + return randomList(10, this::randomDriverProfile); + } + + private DriverProfile randomDriverProfile() { + return new DriverProfile(randomList(10, this::randomOperatorStatus)); + } + + private 
DriverStatus.OperatorStatus randomOperatorStatus() { + String name = randomAlphaOfLength(4); + Operator.Status status = randomBoolean() ? null : new AbstractPageMappingOperator.Status(between(0, Integer.MAX_VALUE)); + return new DriverStatus.OperatorStatus(name, status); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 5b2aba2e9e1f3..f040933e01410 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -29,6 +29,9 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.UnsupportedValueSource; +import org.elasticsearch.compute.operator.AbstractPageMappingOperator; +import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -46,6 +49,7 @@ import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; +import java.util.stream.Stream; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; @@ -67,27 +71,29 @@ public void blockFactoryEmpty() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry( + Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList() + ); } @Override protected EsqlQueryResponse createXContextTestInstance(XContentType xContentType) { // 
columnar param can't be different from the default value (false) since the EsqlQueryResponse will be serialized (by some random // XContentType, not to a StreamOutput) and parsed back, which doesn't preserve columnar field's value. - return randomResponse(false); + return randomResponse(false, null); } @Override protected EsqlQueryResponse createTestInstance() { - return randomResponse(randomBoolean()); + return randomResponse(randomBoolean(), randomProfile()); } - EsqlQueryResponse randomResponse(boolean columnar) { + EsqlQueryResponse randomResponse(boolean columnar, EsqlQueryResponse.Profile profile) { int noCols = randomIntBetween(1, 10); List columns = randomList(noCols, noCols, this::randomColumnInfo); int noPages = randomIntBetween(1, 20); List values = randomList(noPages, noPages, () -> randomPage(columns)); - return new EsqlQueryResponse(columns, values, columnar); + return new EsqlQueryResponse(columns, values, profile, columnar); } private ColumnInfo randomColumnInfo() { @@ -99,6 +105,13 @@ private ColumnInfo randomColumnInfo() { return new ColumnInfo(randomAlphaOfLength(10), type.esType()); } + private EsqlQueryResponse.Profile randomProfile() { + if (randomBoolean()) { + return null; + } + return new EsqlQueryResponseProfileTests().createTestInstance(); + } + private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); @@ -148,23 +161,34 @@ protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) { allNull = false; } } - return switch (allNull ? between(0, 1) : between(0, 2)) { + return switch (allNull ? 
between(0, 2) : between(0, 3)) { case 0 -> { int mutCol = between(0, instance.columns().size() - 1); List cols = new ArrayList<>(instance.columns()); // keep the type the same so the values are still valid but change the name cols.set(mutCol, new ColumnInfo(cols.get(mutCol).name() + "mut", cols.get(mutCol).type())); - yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.columnar()); + yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.profile(), instance.columnar()); } - case 1 -> new EsqlQueryResponse(instance.columns(), deepCopyOfPages(instance), false == instance.columnar()); - case 2 -> { + case 1 -> new EsqlQueryResponse( + instance.columns(), + deepCopyOfPages(instance), + instance.profile(), + false == instance.columnar() + ); + case 2 -> new EsqlQueryResponse( + instance.columns(), + deepCopyOfPages(instance), + randomValueOtherThan(instance.profile(), this::randomProfile), + instance.columnar() + ); + case 3 -> { int noPages = instance.pages().size(); List differentPages = List.of(); do { differentPages.forEach(p -> Releasables.closeExpectNoException(p::releaseBlocks)); differentPages = randomList(noPages, noPages, () -> randomPage(instance.columns())); } while (differentPages.equals(instance.pages())); - yield new EsqlQueryResponse(instance.columns(), differentPages, instance.columnar()); + yield new EsqlQueryResponse(instance.columns(), differentPages, instance.profile(), instance.columnar()); } default -> throw new IllegalArgumentException(); }; @@ -194,7 +218,7 @@ protected EsqlQueryResponse doParseInstance(XContentParser parser) { } public void testChunkResponseSizeColumnar() { - try (EsqlQueryResponse resp = randomResponse(true)) { + try (EsqlQueryResponse resp = randomResponse(true, null)) { int columnCount = resp.pages().get(0).getBlockCount(); int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 5 + bodySize); @@ -202,7 
+226,7 @@ public void testChunkResponseSizeColumnar() { } public void testChunkResponseSizeRows() { - try (EsqlQueryResponse resp = randomResponse(false)) { + try (EsqlQueryResponse resp = randomResponse(false, null)) { int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); assertChunkCount(resp, r -> 5 + bodySize); } @@ -226,10 +250,28 @@ private EsqlQueryResponse simple(boolean columnar) { return new EsqlQueryResponse( List.of(new ColumnInfo("foo", "integer")), List.of(new Page(new IntArrayVector(new int[] { 40, 80 }, 2).asBlock())), + null, columnar ); } + public void testProfileXContent() { + try ( + EsqlQueryResponse response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer")), + List.of(new Page(new IntArrayVector(new int[] { 40, 80 }, 2).asBlock())), + new EsqlQueryResponse.Profile( + List.of(new DriverProfile(List.of(new DriverStatus.OperatorStatus("asdf", new AbstractPageMappingOperator.Status(10))))) + ), + false + ); + ) { + assertThat(Strings.toString(response), equalTo(""" + {"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]],"profile":{"drivers":[""" + """ + {"operators":[{"operator":"asdf","status":{"pages_processed":10}}]}]}}""")); + } + } + @Override protected void dispose(EsqlQueryResponse esqlQueryResponse) { esqlQueryResponse.close(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index 95d8babcc5802..9430e984039fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -231,12 +231,12 @@ public void testPlainTextEmptyCursorWithColumns() { public void testPlainTextEmptyCursorWithoutColumns() { assertEquals( StringUtils.EMPTY, - getTextBodyContent(PLAIN_TEXT.format(req(), new 
EsqlQueryResponse(emptyList(), emptyList(), false))) + getTextBodyContent(PLAIN_TEXT.format(req(), new EsqlQueryResponse(emptyList(), emptyList(), null, false))) ); } private static EsqlQueryResponse emptyData() { - return new EsqlQueryResponse(singletonList(new ColumnInfo("name", "keyword")), emptyList(), false); + return new EsqlQueryResponse(singletonList(new ColumnInfo("name", "keyword")), emptyList(), null, false); } private static EsqlQueryResponse regularData() { @@ -259,7 +259,7 @@ private static EsqlQueryResponse regularData() { ) ); - return new EsqlQueryResponse(headers, values, false); + return new EsqlQueryResponse(headers, values, null, false); } private static EsqlQueryResponse escapedData() { @@ -277,7 +277,7 @@ private static EsqlQueryResponse escapedData() { ) ); - return new EsqlQueryResponse(headers, values, false); + return new EsqlQueryResponse(headers, values, null, false); } private static RestRequest req() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index 558a92de70351..22e532341d30b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -61,6 +61,7 @@ public class TextFormatterTests extends ESTestCase { Block.constantNullBlock(2) ) ), + null, randomBoolean() ); @@ -123,6 +124,7 @@ public void testFormatWithoutHeader() { Block.constantNullBlock(2) ) ), + null, randomBoolean() ); @@ -161,6 +163,7 @@ public void testVeryLongPadding() { .build() ) ), + null, randomBoolean() ) ).format(false) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index 0f6ed2d1ab3bb..b4c9d7a9baeca 100644 
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -74,7 +74,8 @@ public class EvalMapperTests extends ESTestCase { null, 10000000, 10000, - StringUtils.EMPTY + StringUtils.EMPTY, + false ); @ParametersFactory(argumentFormatting = "%1$s") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index c8c8029f994cc..a01d82731bc94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -142,7 +142,8 @@ private EsqlConfiguration config() { pragmas, EsqlPlugin.QUERY_RESULT_TRUNCATION_MAX_SIZE.getDefault(null), EsqlPlugin.QUERY_RESULT_TRUNCATION_DEFAULT_SIZE.getDefault(null), - StringUtils.EMPTY + StringUtils.EMPTY, + false ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java index aaa76c068f58a..9879f7c9ed23d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlConfigurationSerializationTests.java @@ -42,8 +42,19 @@ public static EsqlConfiguration randomConfiguration(String query) { var clusterName = randomAlphaOfLengthBetween(3, 10); var truncation = randomNonNegativeInt(); var defaultTruncation = randomNonNegativeInt(); + boolean profile = randomBoolean(); - return new EsqlConfiguration(zoneId, locale, username, clusterName, randomQueryPragmas(), truncation, 
defaultTruncation, query); + return new EsqlConfiguration( + zoneId, + locale, + username, + clusterName, + randomQueryPragmas(), + truncation, + defaultTruncation, + query, + profile + ); } @Override @@ -53,7 +64,7 @@ protected EsqlConfiguration createTestInstance() { @Override protected EsqlConfiguration mutateInstance(EsqlConfiguration in) throws IOException { - int ordinal = between(0, 7); + int ordinal = between(0, 8); return new EsqlConfiguration( ordinal == 0 ? randomValueOtherThan(in.zoneId(), () -> randomZone().normalized()) : in.zoneId(), ordinal == 1 ? randomValueOtherThan(in.locale(), () -> randomLocale(random())) : in.locale(), @@ -64,7 +75,8 @@ protected EsqlConfiguration mutateInstance(EsqlConfiguration in) throws IOExcept : in.pragmas(), ordinal == 5 ? in.resultTruncationMaxSize() + randomIntBetween(3, 10) : in.resultTruncationMaxSize(), ordinal == 6 ? in.resultTruncationDefaultSize() + randomIntBetween(3, 10) : in.resultTruncationDefaultSize(), - ordinal == 7 ? randomAlphaOfLength(100) : in.query() + ordinal == 7 ? randomAlphaOfLength(100) : in.query(), + ordinal == 8 ? in.profile() == false : in.profile() ); } } From dac927f604b1bc8cae1b7d9c274e2a7ef6b7fff7 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 1 Dec 2023 17:12:31 +0100 Subject: [PATCH 148/263] Fix #102864 (#102874) The test failed if the value matched exactly -Integer.MIN_VALUE, in which case Jackson was casting it from long to int, but the test code was not doing the same. 
--- .../elasticsearch/index/mapper/GeoPointFieldMapperTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index cce44504d4f3e..69cbb1d90b951 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -705,7 +705,7 @@ protected Function loadBlockExpected() { protected static Object asJacksonNumberOutput(long l) { // Cast to int to mimic jackson-core behaviour in NumberOutput.outputLong() - if (l < 0 && l > Integer.MIN_VALUE || l >= 0 && l <= Integer.MAX_VALUE) { + if (l < 0 && l >= Integer.MIN_VALUE || l >= 0 && l <= Integer.MAX_VALUE) { return (int) l; } else { return l; From d84b96e9617be75306d5e89c38090f02209060e2 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 1 Dec 2023 17:24:44 +0100 Subject: [PATCH 149/263] Unmute already fixed test Fixed in https://github.com/elastic/elasticsearch/pull/102874/commits/004e49085af634ad5635c97a8025ae73a2a6c0af --- .../main/java/org/elasticsearch/index/mapper/MapperTestCase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 33eb25cd472c4..44e28132beec0 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1239,7 +1239,6 @@ public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { assertNoDocValueLoader(b -> b.startArray("field").endArray()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102869") public final void testBlockLoaderFromColumnReader() throws IOException 
{ testBlockLoader(true); } From 30e9986389b30e336ee3b10b325730c5eceb6043 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 1 Dec 2023 17:25:56 +0100 Subject: [PATCH 150/263] [Enterprise Search] Add cancel connector sync job endpoint (#102865) Add cancel connector sync job endpoint. --- .../api/connector_sync_job.cancel.json | 32 ++++ .../430_connector_sync_job_cancel.yml | 36 +++++ .../xpack/application/EnterpriseSearch.java | 7 +- .../syncjob/ConnectorSyncJobIndexService.java | 145 +++++++++++------- .../action/CancelConnectorSyncJobAction.java | 110 +++++++++++++ .../RestCancelConnectorSyncJobAction.java | 47 ++++++ ...TransportCancelConnectorSyncJobAction.java | 49 ++++++ .../ConnectorSyncJobIndexServiceTests.java | 105 ++++++++++++- .../syncjob/ConnectorSyncJobTestUtils.java | 5 + ...ncJobActionRequestBWCSerializingTests.java | 47 ++++++ .../CancelConnectorSyncJobActionTests.java | 36 +++++ ...portCancelConnectorSyncJobActionTests.java | 75 +++++++++ .../xpack/security/operator/Constants.java | 7 +- 13 files changed, 641 insertions(+), 60 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json new file mode 100644 index 0000000000000..883dd54bcb89b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json @@ -0,0 +1,32 @@ +{ + "connector_sync_job.cancel": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Cancels a connector sync job." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_cancel", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be canceled" + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml new file mode 100644 index 0000000000000..e9c612cbf9f27 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml @@ -0,0 +1,36 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + 
language: de + is_native: false + service_type: super-connector + +--- +"Cancel a Connector Sync Job": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: sync-job-id-to-cancel } + - do: + connector_sync_job.cancel: + connector_sync_job_id: $sync-job-id-to-cancel + + - match: { acknowledged: true } + + +--- +"Cancel a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.check_in: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 31e9b165e7325..3402c3a8b9d7b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -63,12 +63,15 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import 
org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; @@ -199,7 +202,8 @@ protected XPackLicenseState getLicenseState() { // SyncJob API new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), - new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class) + new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), + new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class) ) ); } @@ -262,6 +266,7 @@ public List getRestHandlers( // SyncJob API new RestPostConnectorSyncJobAction(), new RestDeleteConnectorSyncJobAction(), + new RestCancelConnectorSyncJobAction(), new RestCheckInConnectorSyncJobAction() ) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index e3276249a06b7..ab593fe99fcee 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; @@ -117,56 +118,6 @@ public void createConnectorSyncJob( } } - private String generateId() { - /* Workaround: only needed for generating an id upfront, autoGenerateId() has a side effect generating a timestamp, - * which would raise an error on the response layer later ("autoGeneratedTimestamp should not be set externally"). - * TODO: do we even need to copy the "_id" and set it as "id"? 
- */ - return UUIDs.base64UUID(); - } - - private void getSyncJobConnectorInfo(String connectorId, ActionListener listener) { - try { - - final GetRequest request = new GetRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME, connectorId); - - clientWithOrigin.get(request, new ActionListener<>() { - @Override - public void onResponse(GetResponse response) { - final boolean connectorDoesNotExist = response.isExists() == false; - - if (connectorDoesNotExist) { - onFailure(new ResourceNotFoundException("Connector with id '" + connectorId + "' does not exist.")); - return; - } - - Map source = response.getSource(); - - @SuppressWarnings("unchecked") - final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId( - (String) source.get(Connector.ID_FIELD.getPreferredName()) - ) - .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) - .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) - .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) - .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) - .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) - .setConfiguration((Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName())) - .build(); - - listener.onResponse(syncJobConnectorInfo); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - } catch (Exception e) { - listener.onFailure(e); - } - } - /** * Deletes the {@link ConnectorSyncJob} in the underlying index. 
* @@ -224,8 +175,98 @@ public void checkInConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + Instant cancellationRequestedAt = Instant.now(); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .doc( + Map.of( + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + ConnectorSyncStatus.CANCELING, + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), + cancellationRequestedAt + ) + ); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + + } + + private String generateId() { + /* Workaround: only needed for generating an id upfront, autoGenerateId() has a side effect generating a timestamp, + * which would raise an error on the response layer later ("autoGeneratedTimestamp should not be set externally"). + * TODO: do we even need to copy the "_id" and set it as "id"? 
+ */ + return UUIDs.base64UUID(); + } + + private void getSyncJobConnectorInfo(String connectorId, ActionListener listener) { + try { + + final GetRequest request = new GetRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME, connectorId); + + clientWithOrigin.get(request, new ActionListener<>() { + @Override + public void onResponse(GetResponse response) { + final boolean connectorDoesNotExist = response.isExists() == false; + + if (connectorDoesNotExist) { + onFailure(new ResourceNotFoundException("Connector with id '" + connectorId + "' does not exist.")); + return; + } + + Map source = response.getSource(); + + @SuppressWarnings("unchecked") + final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId( + (String) source.get(Connector.ID_FIELD.getPreferredName()) + ) + .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) + .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) + .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) + .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) + .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) + .setConfiguration((Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName())) + .build(); + + listener.onResponse(syncJobConnectorInfo); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Listeners that checks failures for IndexNotFoundException and DocumentMissingException, + * and transforms them in ResourceNotFoundException, invoking onFailure on the delegate listener. 
*/ static class DelegatingIndexNotFoundOrDocumentMissingActionListener extends DelegatingActionListener { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java new file mode 100644 index 0000000000000..7179bbb3a62f2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; + +public class 
CancelConnectorSyncJobAction extends ActionType { + + public static final CancelConnectorSyncJobAction INSTANCE = new CancelConnectorSyncJobAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/cancel"; + + private CancelConnectorSyncJobAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); + + private final String connectorSyncJobId; + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + } + + public Request(String connectorSyncJobId) { + this.connectorSyncJobId = connectorSyncJobId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, validationException); + } + + return validationException; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(), connectorSyncJobId); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new 
ConstructingObjectParser<>( + "cancel_connector_sync_job_request", + false, + (args) -> new Request((String) args[0]) + ); + + static { + PARSER.declareString(constructorArg(), CONNECTOR_SYNC_JOB_ID_FIELD); + } + + public static CancelConnectorSyncJobAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java new file mode 100644 index 0000000000000..82d679c6f0ad0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction.Request.CONNECTOR_SYNC_JOB_ID_FIELD; + +public class RestCancelConnectorSyncJobAction extends BaseRestHandler { + + private static final String CONNECTOR_SYNC_JOB_ID_PARAM = CONNECTOR_SYNC_JOB_ID_FIELD.getPreferredName(); + + @Override + public String getName() { + return "connector_sync_job_cancel_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_cancel" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + CancelConnectorSyncJobAction.Request request = new CancelConnectorSyncJobAction.Request( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM) + ); + return restChannel -> client.execute(CancelConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java new file mode 100644 index 0000000000000..ac61dcdf08a61 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java @@ -0,0 +1,49 @@ +/* + 
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportCancelConnectorSyncJobAction extends HandledTransportAction< + CancelConnectorSyncJobAction.Request, + AcknowledgedResponse> { + + protected ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportCancelConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + CancelConnectorSyncJobAction.NAME, + transportService, + actionFilters, + CancelConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute(Task task, CancelConnectorSyncJobAction.Request request, ActionListener listener) { + connectorSyncJobIndexService.cancelConnectorSyncJob(request.getConnectorSyncJobId(), listener.map(r -> AcknowledgedResponse.TRUE)); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 6904f3b2760fa..cadc8b761cbe3 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; @@ -30,18 +31,23 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { @@ -217,22 +223,87 @@ public void testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () 
-> awaitCheckInConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testCancelConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusBeforeUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Object cancellationRequestedAtBeforeUpdate = syncJobSourceBeforeUpdate.get( + ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName() + ); + + assertThat(syncJobId, notNullValue()); + assertThat(cancellationRequestedAtBeforeUpdate, nullValue()); + assertThat(syncStatusBeforeUpdate, not(equalTo(ConnectorSyncStatus.CANCELING))); + + UpdateResponse updateResponse = awaitCancelConnectorSyncJob(syncJobId); + + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + ConnectorSyncStatus syncStatusAfterUpdate = ConnectorSyncStatus.fromString( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + Instant cancellationRequestedAtAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(cancellationRequestedAtAfterUpdate, notNullValue()); + assertThat(syncStatusAfterUpdate, equalTo(ConnectorSyncStatus.CANCELING)); + assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { + expectThrows(ResourceNotFoundException.class, () -> 
awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); + } + + private static void assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNotUpdate( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate + ) { + assertFieldsDidNotUpdateExceptFieldList( + syncJobSourceBeforeUpdate, + syncJobSourceAfterUpdate, + List.of(ConnectorSyncJob.STATUS_FIELD, ConnectorSyncJob.CANCELATION_REQUESTED_AT_FIELD) + ); + } + private static void assertFieldsExceptLastSeenDidNotUpdate( Map syncJobSourceBeforeUpdate, Map syncJobSourceAfterUpdate ) { + assertFieldsDidNotUpdateExceptFieldList( + syncJobSourceBeforeUpdate, + syncJobSourceAfterUpdate, + List.of(ConnectorSyncJob.LAST_SEEN_FIELD) + ); + } + + private static void assertFieldsDidNotUpdateExceptFieldList( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate, + List fieldsWhichShouldUpdate + ) { + Set fieldsNamesWhichShouldUpdate = fieldsWhichShouldUpdate.stream() + .map(ParseField::getPreferredName) + .collect(Collectors.toSet()); + for (Map.Entry field : syncJobSourceBeforeUpdate.entrySet()) { String fieldName = field.getKey(); - boolean isNotLastSeen = fieldName.equals(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) == false; + boolean isFieldWhichShouldNotUpdate = fieldsNamesWhichShouldUpdate.contains(fieldName) == false; - if (isNotLastSeen) { + if (isFieldWhichShouldNotUpdate) { Object fieldValueBeforeUpdate = field.getValue(); Object fieldValueAfterUpdate = syncJobSourceAfterUpdate.get(fieldName); assertThat( "Every field except [" - + LAST_SEEN_FIELD_NAME - + "] should stay the same when checking in a sync job. [" + + String.join(",", fieldsNamesWhichShouldUpdate) + + "] should stay the same. 
[" + fieldName + "] did change.", fieldValueBeforeUpdate, @@ -242,6 +313,31 @@ private static void assertFieldsExceptLastSeenDidNotUpdate( } } + private UpdateResponse awaitCancelConnectorSyncJob(String syncJobId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorSyncJobIndexService.cancelConnectorSyncJob(syncJobId, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for cancel request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from cancel request", resp.get()); + return resp.get(); + } + private Map getConnectorSyncJobSourceById(String syncJobId) throws ExecutionException, InterruptedException, TimeoutException { GetRequest getRequest = new GetRequest(ConnectorSyncJobIndexService.CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId); @@ -357,4 +453,5 @@ public void onFailure(Exception e) { return response; } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index e9ff95967e626..4fa1b9122284d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -9,6 +9,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import 
org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; @@ -92,6 +93,10 @@ public static PostConnectorSyncJobAction.Response getRandomPostConnectorSyncJobA return new PostConnectorSyncJobAction.Response(randomAlphaOfLength(10)); } + public static CancelConnectorSyncJobAction.Request getRandomCancelConnectorSyncJobActionRequest() { + return new CancelConnectorSyncJobAction.Request(randomAlphaOfLength(10)); + } + public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyncJobActionRequest() { return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..81f59a130ac70 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class CancelConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + CancelConnectorSyncJobAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return CancelConnectorSyncJobAction.Request::new; + } + + @Override + protected CancelConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomCancelConnectorSyncJobActionRequest(); + } + + @Override + protected CancelConnectorSyncJobAction.Request mutateInstance(CancelConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected CancelConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return CancelConnectorSyncJobAction.Request.parse(parser); + } + + @Override + protected CancelConnectorSyncJobAction.Request mutateInstanceForVersion( + CancelConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return new CancelConnectorSyncJobAction.Request(instance.getConnectorSyncJobId()); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..0dd8d452254dc --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobActionTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class CancelConnectorSyncJobActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdIsPresent_ExpectNoValidationError() { + CancelConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomCancelConnectorSyncJobActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + CancelConnectorSyncJobAction.Request requestWithMissingConnectorId = new CancelConnectorSyncJobAction.Request(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..81c56e3345e28 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobActionTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportCancelConnectorSyncJobActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportCancelConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + 
mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportCancelConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testCancelConnectorSyncJob_ExpectNoWarnings() throws InterruptedException { + CancelConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomCancelConnectorSyncJobActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(CancelConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for cancel request", requestTimedOut); + } + +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 2325a2db8c077..b9d005e695459 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,12 +127,13 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/update_pipeline", + "cluster:admin/xpack/connector/update_scheduling", + 
"cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", - "cluster:admin/xpack/connector/update_filtering", - "cluster:admin/xpack/connector/update_pipeline", - "cluster:admin/xpack/connector/update_scheduling", + "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", From 7dc8a4b2c09239107fcc5e933d024b7318fa733a Mon Sep 17 00:00:00 2001 From: Tom Veasey Date: Fri, 1 Dec 2023 16:32:28 +0000 Subject: [PATCH 151/263] Relax KDE test assertion (#102878) Our assertion for approximate median was too stringent. Fixes #102876. --- .../org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java index 80d5a3ad71136..e4d30912050e3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/KDETests.java @@ -41,8 +41,8 @@ public void testCdfAndSf() { double median = kde.data()[kde.size() / 2]; KDE.ValueAndMagnitude cdf = kde.cdf(median); KDE.ValueAndMagnitude sf = kde.sf(median); - assertThat(cdf.value(), closeTo(0.5, 0.05)); - assertThat(sf.value(), closeTo(0.5, 0.05)); + assertThat(cdf.value(), closeTo(0.5, 0.1)); + assertThat(sf.value(), closeTo(0.5, 0.1)); } // Should approximately sum to 1.0 for some random data. 
From 09afa3602057dd9a447197df84d90c271507b41d Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Fri, 1 Dec 2023 18:03:06 +0100 Subject: [PATCH 152/263] ESQL: Make EvalBenchmarks executable again (#102854) --- .../compute/operator/EvalBenchmark.java | 12 ++-- .../xpack/esql/action/EsqlQueryResponse.java | 4 +- .../esql/enrich/EnrichLookupService.java | 4 +- .../function/scalar/conditional/Case.java | 4 +- .../function/scalar/multivalue/MvAvg.java | 4 +- .../function/scalar/multivalue/MvDedupe.java | 4 +- .../function/scalar/multivalue/MvMax.java | 4 +- .../function/scalar/multivalue/MvMedian.java | 4 +- .../function/scalar/multivalue/MvMin.java | 4 +- .../function/scalar/multivalue/MvSum.java | 4 +- .../function/scalar/nulls/Coalesce.java | 4 +- .../esql/optimizer/LogicalPlanOptimizer.java | 4 +- .../esql/plan/physical/EstimatesRowSize.java | 4 +- .../AbstractPhysicalOperationProviders.java | 2 +- .../esql/planner/LocalExecutionPlanner.java | 58 +------------------ .../xpack/esql/planner/PlannerUtils.java | 58 +++++++++++++++++++ .../esql/action/EsqlQueryResponseTests.java | 4 +- .../function/AbstractFunctionTestCase.java | 4 +- 18 files changed, 95 insertions(+), 91 deletions(-) diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 56a20594f1e6c..3a1142ad87d2f 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -27,11 +27,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import 
org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; -import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; @@ -61,6 +61,11 @@ public class EvalBenchmark { private static final int BLOCK_LENGTH = 8 * 1024; + static final DriverContext driverContext = new DriverContext( + BigArrays.NON_RECYCLING_INSTANCE, + BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) + ); + static { // Smoke test all the expected values and force loading subclasses more like prod try { @@ -72,11 +77,6 @@ public class EvalBenchmark { } } - static final DriverContext driverContext = new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE) - ); - @Param({ "abs", "add", "date_trunc", "equal_to_const", "long_equal_to_long", "long_equal_to_int", "mv_min", "mv_min_ascending" }) public String operation; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index b283231574540..e571713420950 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -45,7 +45,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import 
org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.versionfield.Version; @@ -317,7 +317,7 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef */ private static Page valuesToPage(List dataTypes, List> values) { List results = dataTypes.stream() - .map(c -> LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) + .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) .toList(); for (List row : values) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 8dc5bdaeca393..0f8fd70c3016b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -72,7 +72,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; @@ -262,7 +262,7 @@ private void doLookup( List fields = new ArrayList<>(extractFields.size()); for (int i = 0; i < extractFields.size(); i++) { NamedExpression extractField = extractFields.get(i); - final ElementType elementType = LocalExecutionPlanner.toElementType(extractField.dataType()); + final ElementType elementType = PlannerUtils.toElementType(extractField.dataType()); mergingTypes[i] = elementType; var loaders = 
BlockReaderFactories.loaders( List.of(searchContext), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index caef1fe0de627..0174eca9c1ddf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -17,7 +17,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -155,7 +155,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - ElementType resultType = LocalExecutionPlanner.toElementType(dataType()); + ElementType resultType = PlannerUtils.toElementType(dataType()); List conditionsFactories = conditions.stream() .map(c -> new ConditionEvaluatorSupplier(toEvaluator.apply(c.condition), toEvaluator.apply(c.value))) .toList(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 0a6a5d50ee552..296229bab9b5a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -12,7 +12,7 @@ 
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -45,7 +45,7 @@ public DataType dataType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvAvgDoubleEvaluator.Factory(fieldEval); case INT -> new MvAvgIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index bda8faa62f7af..7d9b40ad0d24f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.operator.MultivalueDedupe; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -43,7 +43,7 @@ 
protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return MultivalueDedupe.evaluator(LocalExecutionPlanner.toElementType(dataType()), fieldEval); + return MultivalueDedupe.evaluator(PlannerUtils.toElementType(dataType()), fieldEval); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index e404e4d9151f9..fafd8d6a584fa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -47,7 +47,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { + return switch (PlannerUtils.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMaxBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMaxBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMaxDoubleEvaluator.Factory(fieldEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index e10cbdd86a072..b60885967264c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -14,7 +14,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -44,7 +44,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvMedianDoubleEvaluator.Factory(fieldEval); case INT -> new MvMedianIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index de78e52a19eb6..1ae2ef41191b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -47,7 +47,7 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toSortableElementType(field().dataType())) { + return switch (PlannerUtils.toSortableElementType(field().dataType())) { case BOOLEAN -> new MvMinBooleanEvaluator.Factory(fieldEval); case BYTES_REF -> new MvMinBytesRefEvaluator.Factory(fieldEval); case DOUBLE -> new MvMinDoubleEvaluator.Factory(fieldEval); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index f543a8ec3878b..a0abced909c48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -12,7 +12,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -39,7 +39,7 @@ protected TypeResolution resolveFieldType() 
{ @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return switch (LocalExecutionPlanner.toElementType(field().dataType())) { + return switch (PlannerUtils.toElementType(field().dataType())) { case DOUBLE -> new MvSumDoubleEvaluator.Factory(fieldEval); case INT -> new MvSumIntEvaluator.Factory(source(), fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index ea95971e1b7b6..43d4fff9c486d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -16,7 +16,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Nullability; @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function x.get(context)).toList() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 3ae19ceef4d08..66654b78c3af4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -26,7 +26,7 @@ import 
org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -664,7 +664,7 @@ private static List aggsFromEmpty(List aggs) { // fill the boolean block later in LocalExecutionPlanner if (dataType != DataTypes.BOOLEAN) { // look for count(literal) with literal != null - var wrapper = BlockUtils.wrapperFor(blockFactory, LocalExecutionPlanner.toElementType(dataType), 1); + var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); if (aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null)) { wrapper.accept(0L); } else { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index 643d99696c80a..b79d7cc0fbdde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -10,7 +10,7 @@ import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -103,7 +103,7 @@ public String 
toString() { } static int estimateSize(DataType dataType) { - ElementType elementType = LocalExecutionPlanner.toElementType(dataType); + ElementType elementType = PlannerUtils.toElementType(dataType); return switch (elementType) { case BOOLEAN -> 1; case BYTES_REF -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 113e4b91232ae..a7d2c6cec50ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -301,7 +301,7 @@ HashAggregationOperator.GroupSpec toHashGroupSpec() { } ElementType elementType() { - return LocalExecutionPlanner.toElementType(attribute.dataType()); + return PlannerUtils.toElementType(attribute.dataType()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 0c4e10e91cb29..3d377497e17af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -78,7 +78,6 @@ import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; @@ -87,7 +86,6 @@ import org.elasticsearch.xpack.ql.expression.NameId; import 
org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; @@ -273,58 +271,6 @@ private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext cont return physicalOperationProviders.fieldExtractPhysicalOperation(fieldExtractExec, plan(fieldExtractExec.child(), context)); } - /** - * Map QL's {@link DataType} to the compute engine's {@link ElementType}. - */ - public static ElementType toElementType(DataType dataType) { - if (dataType == DataTypes.LONG || dataType == DataTypes.DATETIME || dataType == DataTypes.UNSIGNED_LONG) { - return ElementType.LONG; - } - if (dataType == DataTypes.INTEGER) { - return ElementType.INT; - } - if (dataType == DataTypes.DOUBLE) { - return ElementType.DOUBLE; - } - // unsupported fields are passed through as a BytesRef - if (dataType == DataTypes.KEYWORD - || dataType == DataTypes.TEXT - || dataType == DataTypes.IP - || dataType == DataTypes.SOURCE - || dataType == DataTypes.VERSION - || dataType == DataTypes.UNSUPPORTED) { - return ElementType.BYTES_REF; - } - if (dataType == DataTypes.NULL) { - return ElementType.NULL; - } - if (dataType == DataTypes.BOOLEAN) { - return ElementType.BOOLEAN; - } - if (dataType == EsQueryExec.DOC_DATA_TYPE) { - return ElementType.DOC; - } - if (dataType == EsqlDataTypes.GEO_POINT) { - return ElementType.LONG; - } - if (dataType == EsqlDataTypes.CARTESIAN_POINT) { - return ElementType.LONG; - } - throw EsqlIllegalArgumentException.illegalDataType(dataType); - } - - /** - * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. - * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG - * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). 
- */ - public static ElementType toSortableElementType(DataType dataType) { - if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { - return ElementType.UNKNOWN; - } - return toElementType(dataType); - } - private PhysicalOperation planOutput(OutputExec outputExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(outputExec.child(), context); var output = outputExec.output(); @@ -422,7 +368,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte TopNEncoder[] encoders = new TopNEncoder[source.layout.numberOfChannels()]; List inverse = source.layout.inverse(); for (int channel = 0; channel < inverse.size(); channel++) { - elementTypes[channel] = toElementType(inverse.get(channel).type()); + elementTypes[channel] = PlannerUtils.toElementType(inverse.get(channel).type()); encoders[channel] = switch (inverse.get(channel).type().typeName()) { case "ip" -> TopNEncoder.IP; case "text", "keyword" -> TopNEncoder.UTF8; @@ -519,7 +465,7 @@ private PhysicalOperation planGrok(GrokExec grok, LocalExecutionPlannerContext c ElementType[] types = new ElementType[extractedFields.size()]; for (int i = 0; i < extractedFields.size(); i++) { Attribute extractedField = extractedFields.get(i); - ElementType type = toElementType(extractedField.dataType()); + ElementType type = PlannerUtils.toElementType(extractedField.dataType()); fieldToPos.put(extractedField.name(), i); fieldToType.put(extractedField.name(), type); types[i] = type; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 201f3365b78a7..adf684d573cd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -8,13 +8,16 @@ package 
org.elasticsearch.xpack.esql.planner; import org.elasticsearch.common.Strings; +import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; @@ -24,12 +27,15 @@ import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.AttributeSet; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; import org.elasticsearch.xpack.ql.util.Queries; @@ -164,4 +170,56 @@ static QueryBuilder detectFilter(PhysicalPlan plan, String fieldName) { return Queries.combine(FILTER, asList(requestFilter)); } + + /** + * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. 
+ * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG + * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). + */ + public static ElementType toSortableElementType(DataType dataType) { + if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.UNKNOWN; + } + return toElementType(dataType); + } + + /** + * Map QL's {@link DataType} to the compute engine's {@link ElementType}. + */ + public static ElementType toElementType(DataType dataType) { + if (dataType == DataTypes.LONG || dataType == DataTypes.DATETIME || dataType == DataTypes.UNSIGNED_LONG) { + return ElementType.LONG; + } + if (dataType == DataTypes.INTEGER) { + return ElementType.INT; + } + if (dataType == DataTypes.DOUBLE) { + return ElementType.DOUBLE; + } + // unsupported fields are passed through as a BytesRef + if (dataType == DataTypes.KEYWORD + || dataType == DataTypes.TEXT + || dataType == DataTypes.IP + || dataType == DataTypes.SOURCE + || dataType == DataTypes.VERSION + || dataType == DataTypes.UNSUPPORTED) { + return ElementType.BYTES_REF; + } + if (dataType == DataTypes.NULL) { + return ElementType.NULL; + } + if (dataType == DataTypes.BOOLEAN) { + return ElementType.BOOLEAN; + } + if (dataType == EsQueryExec.DOC_DATA_TYPE) { + return ElementType.DOC; + } + if (dataType == EsqlDataTypes.GEO_POINT) { + return ElementType.LONG; + } + if (dataType == EsqlDataTypes.CARTESIAN_POINT) { + return ElementType.LONG; + } + throw EsqlIllegalArgumentException.illegalDataType(dataType); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index f040933e01410..25083268a3761 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -114,7 +114,7 @@ private EsqlQueryResponse.Profile randomProfile() { private Page randomPage(List columns) { return new Page(columns.stream().map(c -> { - Block.Builder builder = LocalExecutionPlanner.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); + Block.Builder builder = PlannerUtils.toElementType(EsqlDataTypes.fromName(c.type())).newBlockBuilder(1, blockFactory); switch (c.type()) { case "unsigned_long", "long" -> ((LongBlock.Builder) builder).appendLong(randomLong()); case "integer" -> ((IntBlock.Builder) builder).appendInt(randomInt()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 6e1b9487d1c9c..81f2fa98be8cc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -37,7 +37,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.optimizer.FoldNull; import org.elasticsearch.xpack.esql.planner.Layout; -import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import 
org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; @@ -377,7 +377,7 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } try { for (int b = 0; b < data.size(); b++) { - ElementType elementType = LocalExecutionPlanner.toElementType(data.get(b).type()); + ElementType elementType = PlannerUtils.toElementType(data.get(b).type()); try (Block.Builder builder = elementType.newBlockBuilder(positions, inputBlockFactory)) { for (int p = 0; p < positions; p++) { if (nullPositions.contains(p)) { From 85514b3d350cebb450edd010e8d71bd60074ecdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 1 Dec 2023 18:24:29 +0100 Subject: [PATCH 153/263] Track rescorer type in search usage stats. (#102771) --- .../admin/cluster/stats/ClusterStatsIT.java | 20 ++++- .../org/elasticsearch/TransportVersions.java | 1 + .../admin/cluster/stats/SearchUsageStats.java | 28 ++++++- .../search/builder/SearchSourceBuilder.java | 4 +- .../search/rescore/RescorerBuilder.java | 4 +- .../org/elasticsearch/usage/SearchUsage.java | 15 ++++ .../usage/SearchUsageHolder.java | 7 ++ .../cluster/stats/SearchUsageStatsTests.java | 80 +++++++++++++++---- .../rescore/QueryRescorerBuilderTests.java | 46 +++++++++-- .../ClusterStatsMonitoringDocTests.java | 1 + 10 files changed, 173 insertions(+), 33 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 902f74ef778a0..93fc17a9a02eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -22,11 +22,13 @@ import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.node.NodeRoleSettings; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -352,16 +354,26 @@ public void testSearchUsageStats() throws IOException { ); getRestClient().performRequest(request); } + { + Request request = new Request("GET", "/_search"); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(QueryBuilders.termQuery("field", "value")) + .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))); + request.setJsonEntity(Strings.toString(searchSourceBuilder)); + getRestClient().performRequest(request); + } SearchUsageStats stats = clusterAdmin().prepareClusterStats().get().getIndicesStats().getSearchUsageStats(); - assertEquals(5, stats.getTotalSearchCount()); + assertEquals(6, stats.getTotalSearchCount()); assertEquals(4, stats.getQueryUsage().size()); assertEquals(1, stats.getQueryUsage().get("match").longValue()); - assertEquals(2, stats.getQueryUsage().get("term").longValue()); + assertEquals(3, stats.getQueryUsage().get("term").longValue()); assertEquals(1, stats.getQueryUsage().get("range").longValue()); assertEquals(1, stats.getQueryUsage().get("bool").longValue()); - assertEquals(2, stats.getSectionsUsage().size()); - assertEquals(4, stats.getSectionsUsage().get("query").longValue()); + assertEquals(3, stats.getSectionsUsage().size()); + assertEquals(5, 
stats.getSectionsUsage().get("query").longValue()); assertEquals(1, stats.getSectionsUsage().get("aggs").longValue()); + assertEquals(1, stats.getSectionsUsage().get("rescore").longValue()); + assertEquals(1, stats.getRescorerUsage().size()); + assertEquals(1, stats.getRescorerUsage().get("query").longValue()); } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 4627a3d907133..b6e204f3839f7 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -182,6 +182,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_GET_MULTIPLE_MODELS = def(8_549_00_0); public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); + public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java index 372ca49a252c8..aa49e9f1ea01f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStats.java @@ -21,6 +21,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.CLUSTER_STATS_RESCORER_USAGE_ADDED; + /** * Holds a snapshot of the search usage statistics. 
* Used to hold the stats for a single node that's part of a {@link ClusterStatsNodeResponse}, as well as to @@ -29,6 +31,7 @@ public final class SearchUsageStats implements Writeable, ToXContentFragment { private long totalSearchCount; private final Map queries; + private final Map rescorers; private final Map sections; /** @@ -38,22 +41,27 @@ public SearchUsageStats() { this.totalSearchCount = 0L; this.queries = new HashMap<>(); this.sections = new HashMap<>(); + this.rescorers = new HashMap<>(); } /** * Creates a new stats instance with the provided info. The expectation is that when a new instance is created using * this constructor, the provided stats are final and won't be modified further. */ - public SearchUsageStats(Map queries, Map sections, long totalSearchCount) { + public SearchUsageStats(Map queries, Map rescorers, Map sections, long totalSearchCount) { this.totalSearchCount = totalSearchCount; this.queries = queries; this.sections = sections; + this.rescorers = rescorers; } public SearchUsageStats(StreamInput in) throws IOException { this.queries = in.readMap(StreamInput::readLong); this.sections = in.readMap(StreamInput::readLong); this.totalSearchCount = in.readVLong(); + this.rescorers = in.getTransportVersion().onOrAfter(CLUSTER_STATS_RESCORER_USAGE_ADDED) + ? 
in.readMap(StreamInput::readLong) + : Map.of(); } @Override @@ -61,6 +69,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(queries, StreamOutput::writeLong); out.writeMap(sections, StreamOutput::writeLong); out.writeVLong(totalSearchCount); + + if (out.getTransportVersion().onOrAfter(CLUSTER_STATS_RESCORER_USAGE_ADDED)) { + out.writeMap(rescorers, StreamOutput::writeLong); + } } /** @@ -68,6 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { */ public void add(SearchUsageStats stats) { stats.queries.forEach((query, count) -> queries.merge(query, count, Long::sum)); + stats.rescorers.forEach((rescorer, count) -> rescorers.merge(rescorer, count, Long::sum)); stats.sections.forEach((query, count) -> sections.merge(query, count, Long::sum)); this.totalSearchCount += stats.totalSearchCount; } @@ -79,6 +92,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws { builder.field("queries"); builder.map(queries); + builder.field("rescorers"); + builder.map(rescorers); builder.field("sections"); builder.map(sections); } @@ -90,6 +105,10 @@ public Map getQueryUsage() { return Collections.unmodifiableMap(queries); } + public Map getRescorerUsage() { + return Collections.unmodifiableMap(rescorers); + } + public Map getSectionsUsage() { return Collections.unmodifiableMap(sections); } @@ -107,12 +126,15 @@ public boolean equals(Object o) { return false; } SearchUsageStats that = (SearchUsageStats) o; - return totalSearchCount == that.totalSearchCount && queries.equals(that.queries) && sections.equals(that.sections); + return totalSearchCount == that.totalSearchCount + && queries.equals(that.queries) + && rescorers.equals(that.rescorers) + && sections.equals(that.sections); } @Override public int hashCode() { - return Objects.hash(totalSearchCount, queries, sections); + return Objects.hash(totalSearchCount, queries, rescorers, sections); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 4fd20387004aa..c7077e4c867b0 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -1411,7 +1411,7 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr sorts = new ArrayList<>(SortBuilder.fromXContent(parser)); } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { rescoreBuilders = new ArrayList<>(); - rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser)); + rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); } else if (EXT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { extBuilders = new ArrayList<>(); @@ -1498,7 +1498,7 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr } else if (RESCORE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { rescoreBuilders = new ArrayList<>(); while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser)); + rescoreBuilders.add(RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage)); } searchUsage.trackSectionUsage(RESCORE_FIELD.getPreferredName()); } else if (STATS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java index 897c14409b5fd..76ee7e09ad870 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java @@ -22,6 +22,7 @@ import 
java.io.IOException; import java.util.Objects; +import java.util.function.Consumer; /** * The abstract base builder for instances of {@link RescorerBuilder}. @@ -67,7 +68,7 @@ public Integer windowSize() { return windowSize; } - public static RescorerBuilder parseFromXContent(XContentParser parser) throws IOException { + public static RescorerBuilder parseFromXContent(XContentParser parser, Consumer rescorerNameConsumer) throws IOException { String fieldName = null; RescorerBuilder rescorer = null; Integer windowSize = null; @@ -83,6 +84,7 @@ public static RescorerBuilder parseFromXContent(XContentParser parser) throws } } else if (token == XContentParser.Token.START_OBJECT) { rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null); + rescorerNameConsumer.accept(fieldName); } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } diff --git a/server/src/main/java/org/elasticsearch/usage/SearchUsage.java b/server/src/main/java/org/elasticsearch/usage/SearchUsage.java index 8c4e7a18269d6..f17dba1746cf8 100644 --- a/server/src/main/java/org/elasticsearch/usage/SearchUsage.java +++ b/server/src/main/java/org/elasticsearch/usage/SearchUsage.java @@ -17,6 +17,7 @@ */ public final class SearchUsage { private final Set queries = new HashSet<>(); + private final Set rescorers = new HashSet<>(); private final Set sections = new HashSet<>(); /** @@ -33,6 +34,13 @@ public void trackSectionUsage(String section) { sections.add(section); } + /** + * Track the usage of the provided rescorer + */ + public void trackRescorerUsage(String name) { + rescorers.add(name); + } + /** * Returns the query types that have been used at least once in the tracked search request */ @@ -40,6 +48,13 @@ public Set getQueryUsage() { return Collections.unmodifiableSet(queries); } + /** + * Returns the rescorer types that have been used at least once in the tracked search request + */ + public Set getRescorerUsage() { 
+ return Collections.unmodifiableSet(rescorers); + } + /** * Returns the search section names that have been used at least once in the tracked search request */ diff --git a/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java b/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java index 1ceaae4f8eb58..ef7d9b3c0a291 100644 --- a/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java +++ b/server/src/main/java/org/elasticsearch/usage/SearchUsageHolder.java @@ -24,6 +24,7 @@ public final class SearchUsageHolder { private final LongAdder totalSearchCount = new LongAdder(); private final Map queriesUsage = new ConcurrentHashMap<>(); + private final Map rescorersUsage = new ConcurrentHashMap<>(); private final Map sectionsUsage = new ConcurrentHashMap<>(); SearchUsageHolder() {} @@ -39,6 +40,9 @@ public void updateUsage(SearchUsage searchUsage) { for (String query : searchUsage.getQueryUsage()) { queriesUsage.computeIfAbsent(query, q -> new LongAdder()).increment(); } + for (String rescorer : searchUsage.getRescorerUsage()) { + rescorersUsage.computeIfAbsent(rescorer, q -> new LongAdder()).increment(); + } } /** @@ -49,8 +53,11 @@ public SearchUsageStats getSearchUsageStats() { queriesUsage.forEach((query, adder) -> queriesUsageMap.put(query, adder.longValue())); Map sectionsUsageMap = Maps.newMapWithExpectedSize(sectionsUsage.size()); sectionsUsage.forEach((query, adder) -> sectionsUsageMap.put(query, adder.longValue())); + Map rescorersUsageMap = Maps.newMapWithExpectedSize(rescorersUsage.size()); + rescorersUsage.forEach((query, adder) -> rescorersUsageMap.put(query, adder.longValue())); return new SearchUsageStats( Collections.unmodifiableMap(queriesUsageMap), + Collections.unmodifiableMap(rescorersUsageMap), Collections.unmodifiableMap(sectionsUsageMap), totalSearchCount.longValue() ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index 1e8bc1e17d525..10419719a5ed1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -8,9 +8,11 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; import java.util.HashMap; @@ -30,6 +32,8 @@ public class SearchUsageStatsTests extends AbstractWireSerializingTestCase RESCORER_TYPES = List.of("query", "learn_to_rank"); + private static final List SECTIONS = List.of( "highlight", "query", @@ -62,59 +66,105 @@ private static Map randomQueryUsage(int size) { return queryUsage; } + private static Map randomRescorerUsage(int size) { + Map rescorerUsage = new HashMap<>(); + while (rescorerUsage.size() < size) { + rescorerUsage.put(randomFrom(RESCORER_TYPES), randomLongBetween(1, Long.MAX_VALUE)); + } + return rescorerUsage; + } + @Override protected SearchUsageStats createTestInstance() { if (randomBoolean()) { return new SearchUsageStats(); } return new SearchUsageStats( - randomQueryUsage(randomIntBetween(0, 4)), - randomSectionsUsage(randomIntBetween(0, 4)), + randomQueryUsage(randomIntBetween(0, QUERY_TYPES.size())), + randomRescorerUsage(randomIntBetween(0, RESCORER_TYPES.size())), + randomSectionsUsage(randomIntBetween(0, SECTIONS.size())), randomLongBetween(10, Long.MAX_VALUE) ); } @Override protected SearchUsageStats mutateInstance(SearchUsageStats instance) { - if (randomBoolean()) { - return new SearchUsageStats( - randomQueryUsage(instance.getQueryUsage().size() + 1), + int i = randomInt(4); + return switch (i) { + case 0 -> 
new SearchUsageStats( + randomValueOtherThan(instance.getQueryUsage(), () -> randomQueryUsage(randomIntBetween(0, QUERY_TYPES.size()))), + instance.getRescorerUsage(), instance.getSectionsUsage(), instance.getTotalSearchCount() ); - } - if (randomBoolean()) { - return new SearchUsageStats( + case 1 -> new SearchUsageStats( instance.getQueryUsage(), - randomSectionsUsage(instance.getSectionsUsage().size() + 1), + randomValueOtherThan(instance.getRescorerUsage(), () -> randomRescorerUsage(randomIntBetween(0, RESCORER_TYPES.size()))), + instance.getSectionsUsage(), instance.getTotalSearchCount() ); - } - return new SearchUsageStats(instance.getQueryUsage(), instance.getSectionsUsage(), randomLongBetween(10, Long.MAX_VALUE)); + case 2 -> new SearchUsageStats( + instance.getQueryUsage(), + instance.getRescorerUsage(), + randomValueOtherThan(instance.getRescorerUsage(), () -> randomSectionsUsage(randomIntBetween(0, SECTIONS.size()))), + instance.getTotalSearchCount() + ); + default -> new SearchUsageStats( + instance.getQueryUsage(), + instance.getRescorerUsage(), + instance.getSectionsUsage(), + randomLongBetween(10, Long.MAX_VALUE) + ); + }; } public void testAdd() { SearchUsageStats searchUsageStats = new SearchUsageStats(); assertEquals(Map.of(), searchUsageStats.getQueryUsage()); + assertEquals(Map.of(), searchUsageStats.getRescorerUsage()); assertEquals(Map.of(), searchUsageStats.getSectionsUsage()); assertEquals(0, searchUsageStats.getTotalSearchCount()); - searchUsageStats.add(new SearchUsageStats(Map.of("match", 10L), Map.of("query", 10L), 10L)); + searchUsageStats.add(new SearchUsageStats(Map.of("match", 10L), Map.of("query", 5L), Map.of("query", 10L), 10L)); assertEquals(Map.of("match", 10L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 10L), searchUsageStats.getSectionsUsage()); + assertEquals(Map.of("query", 5L), searchUsageStats.getRescorerUsage()); assertEquals(10L, searchUsageStats.getTotalSearchCount()); - searchUsageStats.add(new 
SearchUsageStats(Map.of("term", 1L, "match", 1L), Map.of("query", 10L, "knn", 1L), 10L)); + searchUsageStats.add( + new SearchUsageStats( + Map.of("term", 1L, "match", 1L), + Map.of("query", 5L, "learn_to_rank", 2L), + Map.of("query", 10L, "knn", 1L), + 10L + ) + ); assertEquals(Map.of("match", 11L, "term", 1L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 20L, "knn", 1L), searchUsageStats.getSectionsUsage()); + assertEquals(Map.of("query", 10L, "learn_to_rank", 2L), searchUsageStats.getRescorerUsage()); assertEquals(20L, searchUsageStats.getTotalSearchCount()); } public void testToXContent() throws IOException { - SearchUsageStats searchUsageStats = new SearchUsageStats(Map.of("term", 1L), Map.of("query", 10L), 10L); + SearchUsageStats searchUsageStats = new SearchUsageStats(Map.of("term", 1L), Map.of("query", 2L), Map.of("query", 10L), 10L); assertEquals( - "{\"search\":{\"total\":10,\"queries\":{\"term\":1},\"sections\":{\"query\":10}}}", + "{\"search\":{\"total\":10,\"queries\":{\"term\":1},\"rescorers\":{\"query\":2},\"sections\":{\"query\":10}}}", Strings.toString(searchUsageStats) ); } + + /** + * Test (de)serialization on all previous released versions + */ + public void testSerializationBWC() throws IOException { + for (TransportVersion version : TransportVersionUtils.allReleasedVersions()) { + SearchUsageStats testInstance = new SearchUsageStats( + randomQueryUsage(QUERY_TYPES.size()), + Map.of(), + randomSectionsUsage(SECTIONS.size()), + randomLongBetween(0, Long.MAX_VALUE) + ); + assertSerialization(testInstance, version); + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index b7979c70d0d52..0ade522ae1ffa 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java 
@@ -29,6 +29,7 @@ import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.usage.SearchUsage; import org.elasticsearch.xcontent.NamedObjectNotFoundException; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -42,11 +43,13 @@ import org.junit.BeforeClass; import java.io.IOException; +import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; public class QueryRescorerBuilderTests extends ESTestCase { @@ -112,13 +115,15 @@ public void testFromXContent() throws IOException { } rescoreBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); + SearchUsage searchUsage = new SearchUsage(); try (XContentParser parser = createParser(shuffled)) { parser.nextToken(); - RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); + RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage); assertNotSame(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder, secondRescoreBuilder); assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + assertEquals(searchUsage.getRescorerUsage(), Set.of("query")); } } } @@ -243,6 +248,7 @@ public MappedFieldType getFieldType(String name) { * test parsing exceptions for incorrect rescorer syntax */ public void testUnknownFieldsExpection() throws IOException { + SearchUsage searchUsage = new SearchUsage(); String rescoreElement = """ { @@ -251,8 +257,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = 
createParser(rescoreElement)) { - Exception e = expectThrows(NamedObjectNotFoundException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + NamedObjectNotFoundException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("[3:27] unknown field [bad_rescorer_name]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ { @@ -260,8 +270,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -271,14 +285,22 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = "{ }"; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + ParsingException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("missing rescore type", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = 
""" @@ -288,8 +310,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); + XContentParseException e = expectThrows( + XContentParseException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertEquals("[3:17] [query] unknown field [bad_fieldname]", e.getMessage()); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -299,8 +325,12 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); + Exception e = expectThrows( + XContentParseException.class, + () -> RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage) + ); assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]")); + assertThat(searchUsage.getRescorerUsage(), empty()); } rescoreElement = """ @@ -310,7 +340,7 @@ public void testUnknownFieldsExpection() throws IOException { } """; try (XContentParser parser = createParser(rescoreElement)) { - RescorerBuilder.parseFromXContent(parser); + RescorerBuilder.parseFromXContent(parser, searchUsage::trackRescorerUsage); } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index c782f25fdad4c..d88adea7aaef3 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -583,6 +583,7 @@ public void testToXContent() throws IOException { "search" : { "total" : 0, "queries" : {}, + "rescorers" : {}, "sections" : {} }, "dense_vector": { From a4cdaf390cb0ef4dcfb710314d51826b44a8ee87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Fri, 1 Dec 2023 18:30:10 +0100 Subject: [PATCH 154/263] [LTR] Missing param handling improvements (#102850) --- .../trainedmodel/LearnToRankConfig.java | 56 ++++- .../ltr/QueryExtractorBuilder.java | 31 ++- .../datafeed/DatafeedConfigBuilderTests.java | 4 +- .../core/ml/datafeed/DatafeedConfigTests.java | 4 +- .../core/ml/datafeed/DatafeedUpdateTests.java | 6 +- .../trainedmodel/LearnToRankConfigTests.java | 58 +++++- .../ltr/QueryExtractorBuilderTests.java | 21 +- .../core/ml/utils/QueryProviderTests.java | 4 +- .../ml/inference/ltr/LearnToRankService.java | 32 ++- .../ltr/LearnToRankServiceTests.java | 194 ++++++++++-------- 10 files changed, 281 insertions(+), 129 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java index 89dcf746d7927..ba617ca8d04b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java @@ -18,11 +18,14 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import 
org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -33,7 +36,9 @@ public class LearnToRankConfig extends RegressionConfig implements Rewriteable LENIENT_PARSER = createParser(true); private static final ObjectParser STRICT_PARSER = createParser(false); @@ -51,6 +56,7 @@ private static ObjectParser createParser(boo b -> {}, FEATURE_EXTRACTORS ); + parser.declareObject(Builder::setParamsDefaults, (p, c) -> p.map(), DEFAULT_PARAMS); return parser; } @@ -67,8 +73,13 @@ public static Builder builder(LearnToRankConfig config) { } private final List featureExtractorBuilders; + private final Map paramsDefaults; - public LearnToRankConfig(Integer numTopFeatureImportanceValues, List featureExtractorBuilders) { + public LearnToRankConfig( + Integer numTopFeatureImportanceValues, + List featureExtractorBuilders, + Map paramsDefaults + ) { super(DEFAULT_RESULTS_FIELD, numTopFeatureImportanceValues); if (featureExtractorBuilders != null) { Set featureNames = featureExtractorBuilders.stream() @@ -80,23 +91,40 @@ public LearnToRankConfig(Integer numTopFeatureImportanceValues, List getFeatureExtractorBuilders() { return featureExtractorBuilders; } + public List getQueryFeatureExtractorBuilders() { + List queryExtractorBuilders = new ArrayList<>(); + for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { + if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { + queryExtractorBuilders.add(queryExtractorBuilder); + } + } + + return queryExtractorBuilders; + } + @Override public String getResultsField() { return DEFAULT_RESULTS_FIELD; } + public Map getParamsDefaults() { + return paramsDefaults; + } + @Override public boolean isAllocateOnly() { return false; @@ -126,6 +154,7 @@ public 
String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeNamedWriteableCollection(featureExtractorBuilders); + out.writeGenericMap(paramsDefaults); } @Override @@ -146,6 +175,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws featureExtractorBuilders ); } + + if (paramsDefaults.isEmpty() == false) { + builder.field(DEFAULT_PARAMS.getPreferredName(), paramsDefaults); + } + builder.endObject(); return builder; } @@ -156,12 +190,13 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; LearnToRankConfig that = (LearnToRankConfig) o; - return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders); + return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders) + && Objects.equals(paramsDefaults, that.paramsDefaults); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), featureExtractorBuilders); + return Objects.hash(super.hashCode(), featureExtractorBuilders, paramsDefaults); } @Override @@ -197,7 +232,7 @@ public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { rewritten |= (rewrittenExtractor != extractorBuilder); } if (rewritten) { - return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors); + return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); } return this; } @@ -205,12 +240,14 @@ public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { public static class Builder { private Integer numTopFeatureImportanceValues; private List learnToRankFeatureExtractorBuilders; + private Map paramsDefaults = Map.of(); Builder() {} Builder(LearnToRankConfig config) { this.numTopFeatureImportanceValues = config.getNumTopFeatureImportanceValues(); this.learnToRankFeatureExtractorBuilders = config.featureExtractorBuilders; + 
this.paramsDefaults = config.getParamsDefaults(); } public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { @@ -225,8 +262,13 @@ public Builder setLearnToRankFeatureExtractorBuilders( return this; } + public Builder setParamsDefaults(Map paramsDefaults) { + this.paramsDefaults = paramsDefaults; + return this; + } + public LearnToRankConfig build() { - return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders); + return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders, paramsDefaults); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java index f2839148d6a60..a138fbbb98ba1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java @@ -18,35 +18,44 @@ import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import java.io.IOException; +import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; import static org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper.requireNonNull; -public record QueryExtractorBuilder(String featureName, QueryProvider query) implements LearnToRankFeatureExtractorBuilder { +public record QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) + implements + LearnToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("query_extractor"); 
public static final ParseField FEATURE_NAME = new ParseField("feature_name"); public static final ParseField QUERY = new ParseField("query"); + public static final ParseField DEFAULT_SCORE = new ParseField("default_score"); + + public static float DEFAULT_SCORE_DEFAULT = 0f; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), - a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1]) + a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1], Objects.requireNonNullElse((Float) a[2], DEFAULT_SCORE_DEFAULT)) ); private static final ConstructingObjectParser LENIENT_PARSER = new ConstructingObjectParser<>( NAME.getPreferredName(), true, - a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1]) + a -> new QueryExtractorBuilder((String) a[0], (QueryProvider) a[1], Objects.requireNonNullElse((Float) a[2], DEFAULT_SCORE_DEFAULT)) ); static { PARSER.declareString(constructorArg(), FEATURE_NAME); PARSER.declareObject(constructorArg(), (p, c) -> QueryProvider.fromXContent(p, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), QUERY); + PARSER.declareFloat(optionalConstructorArg(), DEFAULT_SCORE); LENIENT_PARSER.declareString(constructorArg(), FEATURE_NAME); LENIENT_PARSER.declareObject( constructorArg(), (p, c) -> QueryProvider.fromXContent(p, true, INFERENCE_CONFIG_QUERY_BAD_FORMAT), QUERY ); + LENIENT_PARSER.declareFloat(optionalConstructorArg(), DEFAULT_SCORE); } public static QueryExtractorBuilder fromXContent(XContentParser parser, Object context) { @@ -55,18 +64,29 @@ public static QueryExtractorBuilder fromXContent(XContentParser parser, Object c } public QueryExtractorBuilder(String featureName, QueryProvider query) { + this(featureName, query, DEFAULT_SCORE_DEFAULT); + } + + public QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) { this.featureName = requireNonNull(featureName, FEATURE_NAME); this.query = requireNonNull(query, QUERY); + if 
(defaultScore < 0f) { + throw new IllegalArgumentException("[" + NAME + "] requires defaultScore to be positive."); + } + this.defaultScore = defaultScore; } public QueryExtractorBuilder(StreamInput input) throws IOException { - this(input.readString(), QueryProvider.fromStream(input)); + this(input.readString(), QueryProvider.fromStream(input), input.readFloat()); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(FEATURE_NAME.getPreferredName(), featureName); + if (defaultScore > 0f) { + builder.field(DEFAULT_SCORE.getPreferredName(), defaultScore); + } builder.field(QUERY.getPreferredName(), query.getQuery()); builder.endObject(); return builder; @@ -81,6 +101,7 @@ public String getWriteableName() { public void writeTo(StreamOutput out) throws IOException { out.writeString(featureName); query.writeTo(out); + out.writeFloat(defaultScore); } @Override @@ -106,6 +127,6 @@ public QueryExtractorBuilder rewrite(QueryRewriteContext ctx) throws IOException if (rewritten == query) { return this; } - return new QueryExtractorBuilder(featureName, rewritten); + return new QueryExtractorBuilder(featureName, rewritten, defaultScore); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java index 6a722896970e6..7970a6c3fbc5a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigBuilderTests.java @@ -30,7 +30,7 @@ import java.util.Map; import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigTests.randomStringList; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static 
org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; public class DatafeedConfigBuilderTests extends AbstractWireSerializingTestCase { @@ -44,7 +44,7 @@ public static DatafeedConfig.Builder createRandomizedDatafeedConfigBuilder(Strin } builder.setIndices(randomStringList(1, 10)); if (randomBoolean()) { - builder.setQueryProvider(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setQueryProvider(createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } boolean addScriptFields = randomBoolean(); if (addScriptFields) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 0d487f27cd903..c42b540f9461e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -70,7 +70,7 @@ import static org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigBuilderTests.createRandomizedDatafeedConfigBuilder; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.DATAFEED_AGGREGATIONS_INTERVAL_MUST_BE_GREATER_THAN_ZERO; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -794,7 +794,7 @@ public void testSerializationOfComplexAggs() throws IOException { .subAggregation(bucketScriptPipelineAggregationBuilder); DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram); 
datafeedConfigBuilder.setQueryProvider( - createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)) + createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)) ); DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder().addAggregator(dateHistogram); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index f6c859830119b..682fbc81b4592 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -60,7 +60,7 @@ import java.util.Map; import static org.elasticsearch.xpack.core.ml.datafeed.AggProviderTests.createRandomValidAggProvider; -import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createRandomValidQueryProvider; +import static org.elasticsearch.xpack.core.ml.utils.QueryProviderTests.createTestQueryProvider; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; @@ -97,7 +97,7 @@ public static DatafeedUpdate createRandomized(String datafeedId, @Nullable Dataf builder.setIndices(DatafeedConfigTests.randomStringList(1, 10)); } if (randomBoolean()) { - builder.setQuery(createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); + builder.setQuery(createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } if (randomBoolean()) { int scriptsSize = randomInt(3); @@ -264,7 +264,7 @@ public void testApply_givenFullUpdateNoAggregations() { DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed"); 
datafeedBuilder.setIndices(Collections.singletonList("i_1")); DatafeedConfig datafeed = datafeedBuilder.build(); - QueryProvider queryProvider = createRandomValidQueryProvider("a", "b"); + QueryProvider queryProvider = createTestQueryProvider("a", "b"); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId()); update.setIndices(Collections.singletonList("i_2")); update.setQueryDelay(TimeValue.timeValueSeconds(42)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java index 16e56b5dc73bd..1059af21ab7eb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -28,7 +29,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.function.Predicate; @@ -45,7 +45,8 @@ public static LearnToRankConfig randomLearnToRankConfig() { randomBoolean() ? null : randomIntBetween(0, 10), randomBoolean() ? null - : Stream.generate(QueryExtractorBuilderTests::randomInstance).limit(randomInt(5)).collect(Collectors.toList()) + : Stream.generate(QueryExtractorBuilderTests::randomInstance).limit(randomInt(5)).collect(Collectors.toList()), + randomBoolean() ? 
null : randomMap(0, 10, () -> Tuple.tuple(randomIdentifier(), randomIdentifier())) ); } @@ -61,7 +62,45 @@ protected LearnToRankConfig createTestInstance() { @Override protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 + int i = randomInt(2); + + LearnToRankConfig.Builder builder = LearnToRankConfig.builder(instance); + + switch (i) { + case 0 -> { + builder.setNumTopFeatureImportanceValues( + randomValueOtherThan( + instance.getNumTopFeatureImportanceValues(), + () -> randomBoolean() && instance.getNumTopFeatureImportanceValues() != 0 ? null : randomIntBetween(0, 10) + ) + ); + } + case 1 -> { + builder.setLearnToRankFeatureExtractorBuilders( + randomValueOtherThan( + instance.getFeatureExtractorBuilders(), + () -> randomBoolean() || instance.getFeatureExtractorBuilders().isEmpty() + ? Stream.generate(QueryExtractorBuilderTests::randomInstance) + .limit(randomIntBetween(1, 5)) + .collect(Collectors.toList()) + : null + ) + ); + } + case 2 -> { + builder.setParamsDefaults( + randomValueOtherThan( + instance.getParamsDefaults(), + () -> randomBoolean() || instance.getParamsDefaults().isEmpty() + ? randomMap(1, 10, () -> Tuple.tuple(randomIdentifier(), randomIdentifier())) + : null + ) + ); + } + default -> throw new AssertionError("Unexpected random test case"); + } + + return builder.build(); } @Override @@ -94,10 +133,11 @@ public void testDuplicateFeatureNames() { new TestValueExtractor("foo"), new TestValueExtractor("foo") ); - expectThrows( - IllegalArgumentException.class, - () -> new LearnToRankConfig(randomBoolean() ? 
null : randomIntBetween(0, 10), featureExtractorBuilderList) - ); + + LearnToRankConfig.Builder builder = LearnToRankConfig.builder(randomLearnToRankConfig()) + .setLearnToRankFeatureExtractorBuilders(featureExtractorBuilderList); + + expectThrows(IllegalArgumentException.class, () -> builder.build()); } @Override @@ -105,7 +145,7 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); namedXContent.add( new NamedXContentRegistry.Entry( LearnToRankFeatureExtractorBuilder.class, @@ -119,7 +159,7 @@ protected NamedXContentRegistry xContentRegistry() { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { List namedWriteables = new ArrayList<>(new MlInferenceNamedXContentProvider().getNamedWriteables()); - namedWriteables.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); + namedWriteables.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); namedWriteables.add( new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java index 07103175f927c..23cce17ba9bae 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilderTests.java @@ -26,7 +26,11 @@ public class QueryExtractorBuilderTests extends AbstractXContentSerializingTestC protected boolean lenient; public static QueryExtractorBuilder randomInstance() { - return new QueryExtractorBuilder(randomAlphaOfLength(10), QueryProviderTests.createRandomValidQueryProvider()); + return new QueryExtractorBuilder( + randomAlphaOfLength(10), + QueryProviderTests.createRandomValidQueryProvider(), + randomFrom(0f, randomFloat()) + ); } @Before @@ -56,10 +60,19 @@ protected QueryExtractorBuilder createTestInstance() { @Override protected QueryExtractorBuilder mutateInstance(QueryExtractorBuilder instance) throws IOException { - int i = randomInt(1); + int i = randomInt(2); return switch (i) { - case 0 -> new QueryExtractorBuilder(randomAlphaOfLength(10), instance.query()); - case 1 -> new QueryExtractorBuilder(instance.featureName(), QueryProviderTests.createRandomValidQueryProvider()); + case 0 -> new QueryExtractorBuilder(randomAlphaOfLength(10), instance.query(), instance.defaultScore()); + case 1 -> new QueryExtractorBuilder( + instance.featureName(), + QueryProviderTests.createRandomValidQueryProvider(), + instance.defaultScore() + ); + case 2 -> new QueryExtractorBuilder( + instance.featureName(), + instance.query(), + randomValueOtherThan(instance.defaultScore(), () -> randomFrom(0f, randomFloat())) + ); default -> throw new AssertionError("unknown random case for instance mutation"); }; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java index dc2f1b7d179f5..a0c2fe93a1a24 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/QueryProviderTests.java @@ -65,10 +65,10 @@ protected QueryProvider doParseInstance(XContentParser parser) throws IOExceptio } public static QueryProvider createRandomValidQueryProvider() { - return createRandomValidQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); + return createTestQueryProvider(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)); } - public static QueryProvider createRandomValidQueryProvider(String field, String value) { + public static QueryProvider createTestQueryProvider(String field, String value) { Map terms = Collections.singletonMap( BoolQueryBuilder.NAME, Collections.singletonMap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java index 42f7d8cf0a3b3..2f85000705d8a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java @@ -9,7 +9,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -37,11 +39,14 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import static java.util.Map.entry; +import static org.elasticsearch.common.xcontent.XContentHelper.mergeDefaults; import static 
org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; @@ -101,9 +106,6 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac null, ActionListener.wrap(trainedModelConfig -> { if (trainedModelConfig.getInferenceConfig() instanceof LearnToRankConfig retrievedInferenceConfig) { - for (LearnToRankFeatureExtractorBuilder builder : retrievedInferenceConfig.getFeatureExtractorBuilders()) { - builder.validate(); - } listener.onResponse(applyParams(retrievedInferenceConfig, params)); return; } @@ -129,15 +131,18 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac * * @throws IOException */ - private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws IOException { + private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws Exception { if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) == false) { return config; } List featureExtractorBuilders = new ArrayList<>(); + Map mergedParams = new HashMap<>(Objects.requireNonNullElse(params, Map.of())); + mergeDefaults(mergedParams, config.getParamsDefaults()); + for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { - featureExtractorBuilders.add(applyParams(featureExtractorBuilder, params)); + featureExtractorBuilders.add(applyParams(featureExtractorBuilder, mergedParams)); } return LearnToRankConfig.builder(config).setLearnToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); @@ -155,11 +160,13 @@ private LearnToRankConfig applyParams(LearnToRankConfig config, Map params - ) throws IOException { + ) throws Exception { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { - return applyParams(queryExtractorBuilder, params); + featureExtractorBuilder = applyParams(queryExtractorBuilder, 
params); } + featureExtractorBuilder.validate(); + return featureExtractorBuilder; } @@ -186,14 +193,16 @@ private QueryExtractorBuilder applyParams(QueryExtractorBuilder queryExtractorBu return new QueryExtractorBuilder( queryExtractorBuilder.featureName(), - QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT) + QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), + queryExtractorBuilder.defaultScore() ); } catch (GeneralScriptException e) { if (e.getRootCause().getClass().getName().equals(MustacheInvalidParameterException.class.getName())) { // Can't use instanceof since it return unexpected result. return new QueryExtractorBuilder( queryExtractorBuilder.featureName(), - QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder()) + defaultQuery(queryExtractorBuilder.defaultScore()), + queryExtractorBuilder.defaultScore() ); } throw e; @@ -205,4 +214,9 @@ private String templateSource(QueryProvider queryProvider) throws IOException { return BytesReference.bytes(queryProvider.toXContent(configSourceBuilder, EMPTY_PARAMS)).utf8ToString(); } } + + private QueryProvider defaultQuery(float score) throws IOException { + QueryBuilder query = score == 0 ? 
new MatchNoneQueryBuilder() : new MatchAllQueryBuilder().boost(score); + return QueryProvider.fromParsedQuery(query); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java index a2cd0ff8856c6..cbe91ba874e6d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.xpack.ml.inference.ltr; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -28,17 +28,18 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.QueryProviderTests; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; import static org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; +import static org.hamcrest.Matchers.hasKey; import static 
org.hamcrest.Matchers.hasSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.argThat; @@ -51,7 +52,6 @@ public class LearnToRankServiceTests extends ESTestCase { public static final String GOOD_MODEL = "modelId"; public static final String BAD_MODEL = "badModel"; - public static final String TEMPLATED_GOOD_MODEL = "templatedModelId"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() .setModelId(GOOD_MODEL) .setInput(new TrainedModelInput(List.of("field1", "field2"))) @@ -62,9 +62,10 @@ public class LearnToRankServiceTests extends ESTestCase { new LearnToRankConfig( 2, List.of( - new QueryExtractorBuilder("feature_1", QueryProviderTests.createRandomValidQueryProvider("field_1", "foo")), - new QueryExtractorBuilder("feature_2", QueryProviderTests.createRandomValidQueryProvider("field_2", "bar")) - ) + new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), + new QueryExtractorBuilder("feature_2", QueryProviderTests.createTestQueryProvider("field_2", "bar")) + ), + Map.of() ) ) .build(); @@ -77,113 +78,102 @@ public class LearnToRankServiceTests extends ESTestCase { .setInferenceConfig(new RegressionConfig(null, null)) .build(); - public static final TrainedModelConfig TEMPLATED_GOOD_MODEL_CONFIG = new TrainedModelConfig.Builder(GOOD_MODEL_CONFIG).setModelId( - TEMPLATED_GOOD_MODEL - ) - .setInferenceConfig( - new LearnToRankConfig( - 2, - List.of( - new QueryExtractorBuilder("feature_1", QueryProviderTests.createRandomValidQueryProvider("field_1", "{{foo_param}}")), - new QueryExtractorBuilder("feature_2", QueryProviderTests.createRandomValidQueryProvider("field_2", "{{bar_param}}")) - ) - ) - ) - .build(); - @SuppressWarnings("unchecked") public void testLoadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - 
mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Map.of(), listener); verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); } @SuppressWarnings("unchecked") public void testLoadMissingLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("non-existing-model", Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig("non-existing-model", Map.of(), listener); verify(listener).onFailure(isA(ResourceNotFoundException.class)); } @SuppressWarnings("unchecked") public void testLoadBadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() - ); + LearnToRankService learnToRankService = getTestLearnToRankService(); ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(BAD_MODEL, Collections.emptyMap(), listener); + learnToRankService.loadLearnToRankConfig(BAD_MODEL, Map.of(), listener); verify(listener).onFailure(isA(ElasticsearchStatusException.class)); } @SuppressWarnings("unchecked") public void testLoadLearnToRankConfigWithTemplate() throws Exception { - LearnToRankService learnToRankService = new LearnToRankService( - mockModelLoadingService(), - mockTrainedModelProvider(), - mockScriptService(), - xContentRegistry() + 
LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + 0, + List.of(new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}"))), + Map.of() ); - // When no parameters are provided we expect query to be rewritten into a match_none query. - { - ActionListener listener = mock(ActionListener.class); - SetOnce retrievedConfig = new SetOnce<>(); + LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); + ActionListener listener = mock(ActionListener.class); + + learnToRankService.loadLearnToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); + verify(listener).onResponse(argThat(retrievedConfig -> { + assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(1)); + QueryExtractorBuilder queryExtractorBuilder = retrievedConfig.getQueryFeatureExtractorBuilders().get(0); + assertEquals(queryExtractorBuilder.featureName(), "feature_1"); + assertEquals(queryExtractorBuilder.query(), QueryProviderTests.createTestQueryProvider("field_1", "foo")); + return true; + })); + } + + @SuppressWarnings("unchecked") + public void testLoadLearnToRankConfigWithMissingTemplateParams() throws Exception { + LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + 0, + List.of( + new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), + new QueryExtractorBuilder("feature_2", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}")), + new QueryExtractorBuilder("feature_3", QueryProviderTests.createTestQueryProvider("field_1", "{{bar_param}}"), 1.5f), + new QueryExtractorBuilder("feature_4", QueryProviderTests.createTestQueryProvider("field_1", "{{baz_param}}")) + ), + Map.of("baz_param", "default_value") + ); + + LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); + ActionListener listener = mock(ActionListener.class); + + 
learnToRankService.loadLearnToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); + verify(listener).onResponse(argThat(retrievedConfig -> { + // Check all features are present. + assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(4)); + + Map queryExtractorBuilders = retrievedConfig.getQueryFeatureExtractorBuilders() + .stream() + .collect(Collectors.toMap(QueryExtractorBuilder::featureName, Function.identity())); + + // feature_1 will be extracted using the provided query since no params are missing for it + assertThat(queryExtractorBuilders, hasKey("feature_1")); + assertEquals(queryExtractorBuilders.get("feature_1").query(), QueryProviderTests.createTestQueryProvider("field_1", "foo")); - doAnswer(i -> { - retrievedConfig.set(i.getArgument(0, LearnToRankConfig.class)); - return null; - }).when(listener).onResponse(any()); - learnToRankService.loadLearnToRankConfig(TEMPLATED_GOOD_MODEL, null, listener); + // feature_2 will be extracted using a match_none query because {{foo_params}} is missing + assertThat(queryExtractorBuilders, hasKey("feature_2")); + assertEquals(queryExtractorBuilders.get("feature_2").query().getParsedQuery(), new MatchNoneQueryBuilder()); - assertNotNull(retrievedConfig.get()); - assertThat(retrievedConfig.get().getFeatureExtractorBuilders(), hasSize(2)); + // feature_3 will be extracted using a match_all query with a boost because: + // - {{bar_param}} is missing + // - a default_score is provided for the query extractor + assertThat(queryExtractorBuilders, hasKey("feature_3")); + assertEquals(queryExtractorBuilders.get("feature_3").query().getParsedQuery(), new MatchAllQueryBuilder().boost(1.5f)); + // feature_4 will be extracted using the default value for the {{baz_param}} + assertThat(queryExtractorBuilders, hasKey("feature_4")); assertEquals( - retrievedConfig.get(), - LearnToRankConfig.builder((LearnToRankConfig) TEMPLATED_GOOD_MODEL_CONFIG.getInferenceConfig()) - 
.setLearnToRankFeatureExtractorBuilders( - List.of( - new QueryExtractorBuilder("feature_1", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())), - new QueryExtractorBuilder("feature_2", QueryProvider.fromParsedQuery(new MatchNoneQueryBuilder())) - ) - ) - .build() - ); - } - - // Now testing when providing all the params of the template. - { - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig( - TEMPLATED_GOOD_MODEL, - Map.ofEntries(Map.entry("foo_param", "foo"), Map.entry("bar_param", "bar")), - listener + queryExtractorBuilders.get("feature_4").query(), + QueryProviderTests.createTestQueryProvider("field_1", "default_value") ); - verify(listener).onResponse(argThat(retrievedConfig -> { - assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(2)); - assertEquals(retrievedConfig, GOOD_MODEL_CONFIG.getInferenceConfig()); - return true; - })); - } + return true; + })); } @Override @@ -191,7 +181,7 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); return new NamedXContentRegistry(namedXContent); } @@ -208,7 +198,6 @@ private TrainedModelProvider mockTrainedModelProvider() { ActionListener l = invocation.getArgument(3, ActionListener.class); switch (modelId) { case GOOD_MODEL -> l.onResponse(GOOD_MODEL_CONFIG); - case TEMPLATED_GOOD_MODEL -> l.onResponse(TEMPLATED_GOOD_MODEL_CONFIG); case BAD_MODEL -> l.onResponse(BAD_MODEL_CONFIG); default -> l.onFailure(new ResourceNotFoundException("missing model")); } @@ -219,7 +208,40 @@ private TrainedModelProvider mockTrainedModelProvider() { return 
trainedModelProvider; } - private ScriptService mockScriptService() { + private LearnToRankService getTestLearnToRankService() { + return getTestLearnToRankService(mockTrainedModelProvider()); + } + + @SuppressWarnings("unchecked") + private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRankConfig) { + TrainedModelProvider trainedModelProvider = mock(TrainedModelProvider.class); + + doAnswer(invocation -> { + String modelId = invocation.getArgument(0); + ActionListener l = invocation.getArgument(3, ActionListener.class); + + l.onResponse( + TrainedModelConfig.builder() + .setModelId(modelId) + .setInput(new TrainedModelInput(List.of("field1", "field2"))) + .setEstimatedOperations(1) + .setModelSize(2) + .setModelType(TrainedModelType.TREE_ENSEMBLE) + .setInferenceConfig(learnToRankConfig) + .build() + ); + return null; + + }).when(trainedModelProvider).getTrainedModel(any(), any(), any(), any()); + + return getTestLearnToRankService(trainedModelProvider); + } + + private LearnToRankService getTestLearnToRankService(TrainedModelProvider trainedModelProvider) { + return new LearnToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); + } + + private ScriptService getTestScriptService() { ScriptEngine scriptEngine = new MustacheScriptEngine(); return new ScriptService(Settings.EMPTY, Map.of(DEFAULT_TEMPLATE_LANG, scriptEngine), ScriptModule.CORE_CONTEXTS, () -> 1L); } From 3dcef66df5e5b2a1c1e56639a2f09181ce7913af Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Fri, 1 Dec 2023 18:59:31 +0100 Subject: [PATCH 155/263] Fix #102863 (#102875) There were two issues: * We mixed x and y when generating estimated errors * We did not deal with values near zero --- .../ql/util/SpatialCoordinateTypesTests.java | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java 
b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java index 67e72d530e2e0..6909475c04521 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypesTests.java @@ -22,12 +22,19 @@ public class SpatialCoordinateTypesTests extends ESTestCase { private static final Map types = new LinkedHashMap<>(); static { types.put(SpatialCoordinateTypes.GEO, new TestTypeFunctions(ESTestCase::randomGeoPoint, v -> 1e-5)); - types.put(SpatialCoordinateTypes.CARTESIAN, new TestTypeFunctions(ESTestCase::randomCartesianPoint, v -> Math.abs(v / 1e5))); + types.put( + SpatialCoordinateTypes.CARTESIAN, + new TestTypeFunctions(ESTestCase::randomCartesianPoint, SpatialCoordinateTypesTests::cartesianError) + ); + } + + private static double cartesianError(double v) { + double abs = Math.abs(v); + return (abs < 1) ? 1e-5 : abs / 1e7; } record TestTypeFunctions(Supplier randomPoint, Function error) {} - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102863") public void testEncoding() { for (var type : types.entrySet()) { for (int i = 0; i < 10; i++) { @@ -35,8 +42,8 @@ public void testEncoding() { SpatialPoint original = type.getValue().randomPoint().get(); var error = type.getValue().error; SpatialPoint point = coordType.longAsPoint(coordType.pointAsLong(original)); - assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getX()))); - assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getY()))); + assertThat(coordType + ": Y[" + i + "]", point.getY(), closeTo(original.getY(), error.apply(original.getY()))); + assertThat(coordType + ": X[" + i + "]", point.getX(), closeTo(original.getX(), error.apply(original.getX()))); } } } From 613e3b69360f05b557fc805319466623449707c1 Mon Sep 17 00:00:00 2001 
From: Jedr Blaszyk Date: Fri, 1 Dec 2023 19:18:35 +0100 Subject: [PATCH 156/263] [Connector API] Implement Check in action (#102847) --- .../rest-api-spec/api/connector.check_in.json | 32 ++++ .../test/entsearch/333_connector_check_in.yml | 41 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 38 ++++- .../connector/ConnectorIndexService.java | 31 ++++ .../connector/ConnectorSyncInfo.java | 20 --- .../RestUpdateConnectorLastSeenAction.java | 41 +++++ ...ransportUpdateConnectorLastSeenAction.java | 55 +++++++ .../action/UpdateConnectorLastSeenAction.java | 147 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 50 ++++++ .../connector/ConnectorTestUtils.java | 2 +- ...tSeenActionRequestBWCSerializingTests.java | 41 +++++ ...SeenActionResponseBWCSerializingTests.java | 42 +++++ .../xpack/security/operator/Constants.java | 3 +- 14 files changed, 519 insertions(+), 29 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json new file mode 100644 index 0000000000000..a9db92aa450e0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -0,0 +1,32 @@ +{ + "connector.check_in": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the last_seen timestamp in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_check_in", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml new file mode 100644 index 0000000000000..042fea7091f43 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/333_connector_check_in.yml @@ -0,0 +1,41 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector +--- +"Connector Check-in": + - do: + connector.check_in: + connector_id: test-connector + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - exists: last_seen + +--- +"Connector Check-in Error - Connector doesn't exist": + - do: + catch: "missing" + 
connector.check_in: + connector_id: test-non-existent-connector + +--- +"Connector Check-in Error - connector_id is empty": + - do: + catch: "bad_request" + connector.check_in: + connector_id: "" diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 3402c3a8b9d7b..29758c3c334cc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -58,9 +59,11 @@ import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -196,6 +199,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), + new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -260,6 +264,7 @@ public List getRestHandlers( new RestListConnectorAction(), new RestPutConnectorAction(), new RestUpdateConnectorFilteringAction(), + new RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index d4aab30ba89bf..f824009196648 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -51,6 +51,7 @@ *
  • The name of the Elasticsearch index where the synchronized data is stored or managed.
  • *
  • A boolean flag 'isNative' indicating whether the connector is a native Elasticsearch connector.
  • *
  • The language associated with the connector.
  • + *
  • The timestamp when the connector was last active or seen.
  • *
  • A {@link ConnectorSyncInfo} object containing synchronization state and history information.
  • *
  • The name of the connector.
  • *
  • A {@link ConnectorIngestPipeline} object specifying the data ingestion pipeline configuration.
  • @@ -87,6 +88,8 @@ public class Connector implements NamedWriteable, ToXContentObject { @Nullable private final String language; @Nullable + private final Instant lastSeen; + @Nullable private final ConnectorSyncInfo syncInfo; @Nullable private final String name; @@ -115,6 +118,7 @@ public class Connector implements NamedWriteable, ToXContentObject { * @param indexName Name of the index associated with the connector. * @param isNative Flag indicating whether the connector is a native type. * @param language The language supported by the connector. + * @param lastSeen The timestamp when the connector was last active or seen. * @param syncInfo Information about the synchronization state of the connector. * @param name Name of the connector. * @param pipeline Ingest pipeline configuration. @@ -136,6 +140,7 @@ private Connector( String indexName, boolean isNative, String language, + Instant lastSeen, ConnectorSyncInfo syncInfo, String name, ConnectorIngestPipeline pipeline, @@ -156,6 +161,7 @@ private Connector( this.indexName = indexName; this.isNative = isNative; this.language = language; + this.lastSeen = lastSeen; this.syncInfo = syncInfo; this.name = name; this.pipeline = pipeline; @@ -178,6 +184,7 @@ public Connector(StreamInput in) throws IOException { this.indexName = in.readOptionalString(); this.isNative = in.readBoolean(); this.language = in.readOptionalString(); + this.lastSeen = in.readOptionalInstant(); this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); this.name = in.readOptionalString(); this.pipeline = in.readOptionalWriteable(ConnectorIngestPipeline::new); @@ -199,6 +206,7 @@ public Connector(StreamInput in) throws IOException { public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); public static final ParseField LANGUAGE_FIELD = new ParseField("language"); + public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); 
static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); @@ -221,6 +229,7 @@ public Connector(StreamInput in) throws IOException { .setIndexName((String) args[i++]) .setIsNative((Boolean) args[i++]) .setLanguage((String) args[i++]) + .setLastSeen((Instant) args[i++]) .setSyncInfo( new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) .setLastAccessControlSyncScheduledAt((Instant) args[i++]) @@ -228,7 +237,6 @@ public Connector(StreamInput in) throws IOException { .setLastDeletedDocumentCount((Long) args[i++]) .setLastIncrementalSyncScheduledAt((Instant) args[i++]) .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSeen((Instant) args[i++]) .setLastSyncError((String) args[i++]) .setLastSyncScheduledAt((Instant) args[i++]) .setLastSyncStatus((ConnectorSyncStatus) args[i++]) @@ -272,6 +280,12 @@ public Connector(StreamInput in) throws IOException { PARSER.declareString(optionalConstructorArg(), INDEX_NAME_FIELD); PARSER.declareBoolean(optionalConstructorArg(), IS_NATIVE_FIELD); PARSER.declareString(optionalConstructorArg(), LANGUAGE_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + Connector.LAST_SEEN_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( @@ -294,12 +308,6 @@ public Connector(StreamInput in) throws IOException { ObjectParser.ValueType.STRING ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), - ConnectorSyncInfo.LAST_SEEN_FIELD, - ObjectParser.ValueType.STRING - ); PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), @@ -394,6 +402,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (language != null) { builder.field(LANGUAGE_FIELD.getPreferredName(), language); } + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); if (syncInfo != null) { syncInfo.toXContent(builder, params); } @@ -433,6 +442,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(indexName); out.writeBoolean(isNative); out.writeOptionalString(language); + out.writeOptionalInstant(lastSeen); out.writeOptionalWriteable(syncInfo); out.writeOptionalString(name); out.writeOptionalWriteable(pipeline); @@ -475,6 +485,10 @@ public Map getConfiguration() { return configuration; } + public Instant getLastSeen() { + return lastSeen; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -492,6 +506,7 @@ public boolean equals(Object o) { && Objects.equals(filtering, connector.filtering) && Objects.equals(indexName, connector.indexName) && Objects.equals(language, connector.language) + && Objects.equals(lastSeen, connector.lastSeen) && Objects.equals(syncInfo, connector.syncInfo) && Objects.equals(name, connector.name) && Objects.equals(pipeline, connector.pipeline) @@ -515,6 +530,7 @@ public int 
hashCode() { indexName, isNative, language, + lastSeen, syncInfo, name, pipeline, @@ -544,6 +560,8 @@ public static class Builder { private String indexName; private boolean isNative = false; private String language; + + private Instant lastSeen; private ConnectorSyncInfo syncInfo = new ConnectorSyncInfo.Builder().build(); private String name; private ConnectorIngestPipeline pipeline; @@ -611,6 +629,11 @@ public Builder setLanguage(String language) { return this; } + public Builder setLastSeen(Instant lastSeen) { + this.lastSeen = lastSeen; + return this; + } + public Builder setSyncInfo(ConnectorSyncInfo syncInfo) { this.syncInfo = syncInfo; return this; @@ -664,6 +687,7 @@ public Connector build() { indexName, isNative, language, + lastSeen, syncInfo, name, pipeline, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 749e8c2e9dd87..9730a0217b942 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -32,6 +32,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -261,6 +262,36 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } } + /** + * Updates the lastSeen property of a {@link Connector}. 
+ * + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 10a2d54e29300..8f2002efff5b6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -33,8 +33,6 @@ public class ConnectorSyncInfo implements Writeable, ToXContentFragment { @Nullable private final Long lastIndexedDocumentCount; @Nullable - private final Instant lastSeen; - @Nullable 
private final String lastSyncError; @Nullable private final Instant lastSyncScheduledAt; @@ -50,7 +48,6 @@ public class ConnectorSyncInfo implements Writeable, ToXContentFragment { * @param lastDeletedDocumentCount The count of documents last deleted during sync. * @param lastIncrementalSyncScheduledAt The timestamp when the last incremental sync was scheduled. * @param lastIndexedDocumentCount The count of documents last indexed during sync. - * @param lastSeen The timestamp when the connector was last active or seen. * @param lastSyncError The last error message encountered during sync, if any. * @param lastSyncScheduledAt The timestamp when the last sync was scheduled. * @param lastSyncStatus The status of the last sync. @@ -63,7 +60,6 @@ private ConnectorSyncInfo( Long lastDeletedDocumentCount, Instant lastIncrementalSyncScheduledAt, Long lastIndexedDocumentCount, - Instant lastSeen, String lastSyncError, Instant lastSyncScheduledAt, ConnectorSyncStatus lastSyncStatus, @@ -75,7 +71,6 @@ private ConnectorSyncInfo( this.lastDeletedDocumentCount = lastDeletedDocumentCount; this.lastIncrementalSyncScheduledAt = lastIncrementalSyncScheduledAt; this.lastIndexedDocumentCount = lastIndexedDocumentCount; - this.lastSeen = lastSeen; this.lastSyncError = lastSyncError; this.lastSyncScheduledAt = lastSyncScheduledAt; this.lastSyncStatus = lastSyncStatus; @@ -89,7 +84,6 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { this.lastDeletedDocumentCount = in.readOptionalLong(); this.lastIncrementalSyncScheduledAt = in.readOptionalInstant(); this.lastIndexedDocumentCount = in.readOptionalLong(); - this.lastSeen = in.readOptionalInstant(); this.lastSyncError = in.readOptionalString(); this.lastSyncScheduledAt = in.readOptionalInstant(); this.lastSyncStatus = in.readOptionalEnum(ConnectorSyncStatus.class); @@ -102,7 +96,6 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new 
ParseField("last_deleted_document_count"); static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); - static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); @@ -129,9 +122,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (lastIndexedDocumentCount != null) { builder.field(LAST_INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastIndexedDocumentCount); } - if (lastSeen != null) { - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); - } if (lastSyncError != null) { builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); } @@ -156,7 +146,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalLong(lastDeletedDocumentCount); out.writeOptionalInstant(lastIncrementalSyncScheduledAt); out.writeOptionalLong(lastIndexedDocumentCount); - out.writeOptionalInstant(lastSeen); out.writeOptionalString(lastSyncError); out.writeOptionalInstant(lastSyncScheduledAt); out.writeOptionalEnum(lastSyncStatus); @@ -174,7 +163,6 @@ public boolean equals(Object o) { && Objects.equals(lastDeletedDocumentCount, that.lastDeletedDocumentCount) && Objects.equals(lastIncrementalSyncScheduledAt, that.lastIncrementalSyncScheduledAt) && Objects.equals(lastIndexedDocumentCount, that.lastIndexedDocumentCount) - && Objects.equals(lastSeen, that.lastSeen) && Objects.equals(lastSyncError, that.lastSyncError) && Objects.equals(lastSyncScheduledAt, that.lastSyncScheduledAt) && lastSyncStatus == that.lastSyncStatus @@ -190,7 +178,6 @@ public int hashCode() { 
lastDeletedDocumentCount, lastIncrementalSyncScheduledAt, lastIndexedDocumentCount, - lastSeen, lastSyncError, lastSyncScheduledAt, lastSyncStatus, @@ -206,7 +193,6 @@ public static class Builder { private Long lastDeletedDocumentCount; private Instant lastIncrementalSyncScheduledAt; private Long lastIndexedDocumentCount; - private Instant lastSeen; private String lastSyncError; private Instant lastSyncScheduledAt; private ConnectorSyncStatus lastSyncStatus; @@ -242,11 +228,6 @@ public Builder setLastIndexedDocumentCount(Long lastIndexedDocumentCount) { return this; } - public Builder setLastSeen(Instant lastSeen) { - this.lastSeen = lastSeen; - return this; - } - public Builder setLastSyncError(String lastSyncError) { this.lastSyncError = lastSyncError; return this; @@ -275,7 +256,6 @@ public ConnectorSyncInfo build() { lastDeletedDocumentCount, lastIncrementalSyncScheduledAt, lastIndexedDocumentCount, - lastSeen, lastSyncError, lastSyncScheduledAt, lastSyncStatus, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..b2ebaa74984b1 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorLastSeenAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_last_seen_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_check_in")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorLastSeenAction.Request request = new UpdateConnectorLastSeenAction.Request(restRequest.param("connector_id")); + return channel -> client.execute( + UpdateConnectorLastSeenAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorLastSeenAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..3d3d2c9ee04b7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorLastSeenAction extends HandledTransportAction< + UpdateConnectorLastSeenAction.Request, + UpdateConnectorLastSeenAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorLastSeenAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorLastSeenAction.NAME, + transportService, + actionFilters, + UpdateConnectorLastSeenAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorLastSeenAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorLastSeen( + request, + listener.map(r -> new UpdateConnectorLastSeenAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java new file mode 100644 index 0000000000000..976be76ba84af --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class UpdateConnectorLastSeenAction extends ActionType { + + public static final UpdateConnectorLastSeenAction INSTANCE = new UpdateConnectorLastSeenAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_last_seen"; + + public UpdateConnectorLastSeenAction() { + super(NAME, UpdateConnectorLastSeenAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private final Instant lastSeen; + + public Request(String connectorId) { + this.connectorId = connectorId; + this.lastSeen = Instant.now(); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = 
in.readString(); + this.lastSeen = in.readInstant(); + } + + public String getConnectorId() { + return connectorId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeInstant(lastSeen); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(lastSeen, request.lastSeen); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, lastSeen); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case 
NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 5f32f27b1ec64..c93135942348a 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -108,6 +109,28 @@ public void testUpdateConnectorFiltering() throws Exception { assertThat(filteringList, equalTo(indexedConnector.getFiltering())); } + public void testUpdateConnectorLastSeen() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + DocWriteResponse 
updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnectorTime1 = awaitGetConnector(connector.getConnectorId()); + assertNotNull(indexedConnectorTime1.getLastSeen()); + + checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnectorTime2 = awaitGetConnector(connector.getConnectorId()); + assertNotNull(indexedConnectorTime2.getLastSeen()); + assertTrue(indexedConnectorTime2.getLastSeen().isAfter(indexedConnectorTime1.getLastSeen())); + + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -232,6 +255,7 @@ private UpdateResponse awaitUpdateConnectorFiltering(UpdateConnectorFilteringAct final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); connectorIndexService.updateConnectorFiltering(updateFiltering, new ActionListener<>() { + @Override public void onResponse(UpdateResponse indexResponse) { resp.set(indexResponse); @@ -244,6 +268,7 @@ public void onFailure(Exception e) { latch.countDown(); } }); + assertTrue("Timeout waiting for update filtering request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); @@ -252,6 +277,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorLastSeen(UpdateConnectorLastSeenAction.Request checkIn) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorLastSeen(checkIn, new ActionListener<>() { + 
@Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for check-in request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from check-in request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index e1752ed6fb354..a0cf018142599 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -73,7 +73,6 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { .setLastDeletedDocumentCount(randomFrom(new Long[] { null, randomLong() })) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastIndexedDocumentCount(randomFrom(new Long[] { null, randomLong() })) - .setLastSeen(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) @@ -190,6 +189,7 @@ public static Connector getRandomConnector() { 
.setIndexName(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setIsNative(randomBoolean()) .setLanguage(randomFrom(new String[] { null, randomAlphaOfLength(10) })) + .setLastSeen(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setSyncInfo(getRandomConnectorSyncInfo()) .setName(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setPipeline(randomBoolean() ? getRandomConnectorIngestPipeline() : null) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..13e9e546d516b --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionRequestBWCSerializingTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSeenActionRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSeenAction.Request> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSeenAction.Request::new; + } + + @Override + protected UpdateConnectorLastSeenAction.Request createTestInstance() { + return new UpdateConnectorLastSeenAction.Request(randomUUID()); + } + + @Override + protected UpdateConnectorLastSeenAction.Request mutateInstance(UpdateConnectorLastSeenAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSeenAction.Request mutateInstanceForVersion( + UpdateConnectorLastSeenAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d992f1b5f188e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSeenActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSeenAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSeenAction.Response::new; + } + + @Override + protected UpdateConnectorLastSeenAction.Response createTestInstance() { + return new UpdateConnectorLastSeenAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorLastSeenAction.Response mutateInstance(UpdateConnectorLastSeenAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSeenAction.Response mutateInstanceForVersion( + UpdateConnectorLastSeenAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index b9d005e695459..aa6c67798e3e3 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,9 +127,10 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + 
"cluster:admin/xpack/connector/update_filtering", + "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", - "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", From 6750eb37f9744296d2b11f0c63958e218a4cb339 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 1 Dec 2023 13:18:51 -0500 Subject: [PATCH 157/263] Better processor stat merge (#102821) --- docs/changelog/102821.yaml | 5 + .../org/elasticsearch/ingest/IngestStats.java | 67 ++++++++--- .../ingest/IngestStatsTests.java | 106 +++++++++++++----- 3 files changed, 136 insertions(+), 42 deletions(-) create mode 100644 docs/changelog/102821.yaml diff --git a/docs/changelog/102821.yaml b/docs/changelog/102821.yaml new file mode 100644 index 0000000000000..dcd6721621878 --- /dev/null +++ b/docs/changelog/102821.yaml @@ -0,0 +1,5 @@ +pr: 102821 +summary: Better processor stat merge +area: Ingest Node +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java index e197af5fbb46a..488a498f1640a 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestStats.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestStats.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; public record IngestStats(Stats totalStats, List pipelineStats, Map> processorStats) implements @@ -263,20 +262,62 @@ static List merge(List first, List sec */ public record ProcessorStat(String name, String type, Stats stats) { - // The list of ProcessorStats has *always* stats for each processor (even if processor was executed or not), so it's safe to zip - // both lists using a common index iterator. 
private static List merge(List first, List second) { - var merged = new ArrayList(); - assert first.size() == second.size() - : "stats size mismatch [" - + first.stream().map(ps -> ps.name + ":" + ps.type).collect(Collectors.joining(",")) - + "] [" - + second.stream().map(ps -> ps.name + ":" + ps.type).collect(Collectors.joining(",")) - + "]"; - for (var i = 0; i < first.size(); i++) { - merged.add(new ProcessorStat(first.get(i).name, first.get(i).type, Stats.merge(first.get(i).stats, second.get(i).stats))); + // in the simple case, this amounts to summing up the stats in the first and second and returning + // a new list of stats that contains the sum. but there are a few not-quite-so-simple cases, too, + // so this logic is a little bit intricate. + + // total up the stats across both sides + long firstIngestCountTotal = 0; + for (ProcessorStat ps : first) { + firstIngestCountTotal += ps.stats.ingestCount; + } + + long secondIngestCountTotal = 0; + for (ProcessorStat ps : second) { + secondIngestCountTotal += ps.stats.ingestCount; + } + + // early return in the case of a non-ingest node (the sum of the stats will be zero, so just return the other) + if (firstIngestCountTotal == 0) { + return second; + } else if (secondIngestCountTotal == 0) { + return first; + } + + // the list of stats can be different depending on the exact order of application of the cluster states + // that apply a change to a pipeline -- figure out if they match or not (usually they match!!!) 
+ + // speculative execution of the expected, simple case (where we can merge the processor stats) + // if we process both lists of stats and everything matches up, we can return the resulting merged list + if (first.size() == second.size()) { // if the sizes of the lists don't match, then we can skip all this + boolean match = true; + var merged = new ArrayList(first.size()); + for (var i = 0; i < first.size(); i++) { + ProcessorStat ps1 = first.get(i); + ProcessorStat ps2 = second.get(i); + if (ps1.name.equals(ps2.name) == false || ps1.type.equals(ps2.type) == false) { + match = false; + break; + } else { + merged.add(new ProcessorStat(ps1.name, ps1.type, Stats.merge(ps1.stats, ps2.stats))); + } + } + if (match) { + return merged; + } + } + + // speculative execution failed, so we're in the unfortunate case. the lists are different, and they + // can't be meaningfully merged without more information. note that IngestService#innerUpdatePipelines + // resets the counts if there's enough variation on an update, so we'll favor the side with the *lower* + // count as being the 'newest' -- the assumption is that the higher side is just a cluster state + // application away from itself being reset to zero anyway. 
+ if (firstIngestCountTotal < secondIngestCountTotal) { + return first; + } else { + return second; } - return merged; } } } diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java index c9bf1f97a4e9d..2be2f56677648 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java @@ -71,42 +71,86 @@ public void testPipelineStatsMerge() { ); } - public void testProcessorStatsMerge() { + public void testProcessorStatsMergeZeroCounts() { { - var first = Map.of("pipeline-1", randomPipelineProcessorStats()); + var expected = randomPipelineProcessorStats(); + var first = Map.of("pipeline-1", expected); + + // merging with an empty map yields the non-empty map assertEquals(IngestStats.merge(Map.of(), first), first); assertEquals(IngestStats.merge(first, Map.of()), first); + + // it's the same exact reference, in fact + assertSame(expected, IngestStats.merge(Map.of(), first).get("pipeline-1")); + assertSame(expected, IngestStats.merge(first, Map.of()).get("pipeline-1")); } { - var first = Map.of( - "pipeline-1", - randomPipelineProcessorStats(), - "pipeline-2", - randomPipelineProcessorStats(), - "pipeline-3", - randomPipelineProcessorStats() + var expected = randomPipelineProcessorStats(); + var first = Map.of("pipeline-1", expected); + var zero = List.of( + new IngestStats.ProcessorStat("proc-1", "type-1", zeroStats()), + new IngestStats.ProcessorStat("proc-1", "type-2", zeroStats()), + new IngestStats.ProcessorStat("proc-2", "type-1", zeroStats()), + new IngestStats.ProcessorStat("proc-3", "type-3", zeroStats()) ); - var second = Map.of( + var second = Map.of("pipeline-1", zero); + + // merging with a zero map yields the non-zero map + assertEquals(IngestStats.merge(second, first), first); + assertEquals(IngestStats.merge(first, second), first); + + // it's the same exact reference, in 
fact + assertSame(expected, IngestStats.merge(second, first).get("pipeline-1")); + assertSame(expected, IngestStats.merge(first, second).get("pipeline-1")); + } + } + + public void testProcessorStatsMerge() { + var first = Map.of( + "pipeline-1", + randomPipelineProcessorStats(), + "pipeline-2", + randomPipelineProcessorStats(), + "pipeline-3", + randomPipelineProcessorStats() + ); + var second = Map.of( + "pipeline-2", + randomPipelineProcessorStats(), + "pipeline-3", + randomPipelineProcessorStats(), + "pipeline-1", + randomPipelineProcessorStats() + ); + + assertEquals( + IngestStats.merge(first, second), + Map.of( + "pipeline-1", + expectedPipelineProcessorStats(first.get("pipeline-1"), second.get("pipeline-1")), "pipeline-2", - randomPipelineProcessorStats(), + expectedPipelineProcessorStats(first.get("pipeline-2"), second.get("pipeline-2")), "pipeline-3", - randomPipelineProcessorStats(), - "pipeline-1", - randomPipelineProcessorStats() - ); + expectedPipelineProcessorStats(first.get("pipeline-3"), second.get("pipeline-3")) + ) + ); + } - assertEquals( - IngestStats.merge(first, second), - Map.of( - "pipeline-1", - expectedPipelineProcessorStats(first.get("pipeline-1"), second.get("pipeline-1")), - "pipeline-2", - expectedPipelineProcessorStats(first.get("pipeline-2"), second.get("pipeline-2")), - "pipeline-3", - expectedPipelineProcessorStats(first.get("pipeline-3"), second.get("pipeline-3")) - ) - ); - } + public void testProcessorStatsMergeHeterogeneous() { + // if a pipeline has heterogeneous *non-zero* stats, then we defer to the one with a smaller total ingest count + + var first = Map.of( + "pipeline-1", + List.of( + new IngestStats.ProcessorStat("name-1", "type-1", new IngestStats.Stats(randomLongBetween(1, 100), 0, 0, 0)), + new IngestStats.ProcessorStat("name-2", "type-2", new IngestStats.Stats(randomLongBetween(1, 100), 0, 0, 0)) + ) + ); + var expected = List.of(new IngestStats.ProcessorStat("name-1", "type-1", new IngestStats.Stats(1, 0, 0, 0))); 
+ var second = Map.of("pipeline-1", expected); + + assertEquals(second, IngestStats.merge(first, second)); + assertSame(expected, IngestStats.merge(second, first).get("pipeline-1")); } private static List expectedPipelineProcessorStats( @@ -117,7 +161,7 @@ private static List expectedPipelineProcessorStats( new IngestStats.ProcessorStat("proc-1", "type-1", merge(first.get(0).stats(), second.get(0).stats())), new IngestStats.ProcessorStat("proc-1", "type-2", merge(first.get(1).stats(), second.get(1).stats())), new IngestStats.ProcessorStat("proc-2", "type-1", merge(first.get(2).stats(), second.get(2).stats())), - new IngestStats.ProcessorStat("proc-3", "type-4", merge(first.get(3).stats(), second.get(3).stats())) + new IngestStats.ProcessorStat("proc-3", "type-3", merge(first.get(3).stats(), second.get(3).stats())) ); } @@ -126,7 +170,7 @@ private static List randomPipelineProcessorStats() { randomProcessorStat("proc-1", "type-1"), randomProcessorStat("proc-1", "type-2"), randomProcessorStat("proc-2", "type-1"), - randomProcessorStat("proc-3", "type-4") + randomProcessorStat("proc-3", "type-3") ); } @@ -216,4 +260,8 @@ private static IngestStats.PipelineStat randomPipelineStat(String id) { private static IngestStats.Stats randomStats() { return new IngestStats.Stats(randomLong(), randomLong(), randomLong(), randomLong()); } + + private static IngestStats.Stats zeroStats() { + return new IngestStats.Stats(0, 0, 0, 0); + } } From 8af50e314fe837c46353c12938c102bcab2aab63 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:24:14 -0500 Subject: [PATCH 158/263] Reformatted RestStatus to include a blank line after each enum declaration (#102882) --- .../org/elasticsearch/rest/RestStatus.java | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/rest/RestStatus.java b/server/src/main/java/org/elasticsearch/rest/RestStatus.java index 
101c412e1420e..84f7be32db6e5 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestStatus.java +++ b/server/src/main/java/org/elasticsearch/rest/RestStatus.java @@ -19,6 +19,7 @@ import static java.util.Collections.unmodifiableMap; public enum RestStatus { + /** * The client SHOULD continue with its request. This interim response is used to inform the client that the * initial part of the request has been received and has not yet been rejected by the server. The client @@ -26,6 +27,7 @@ public enum RestStatus { * ignore this response. The server MUST send a final response after the request has been completed. */ CONTINUE(100), + /** * The server understands and is willing to comply with the client's request, via the Upgrade message header field * (section 14.42), for a change in the application protocol being used on this connection. The server will @@ -33,6 +35,7 @@ public enum RestStatus { * which terminates the 101 response. */ SWITCHING_PROTOCOLS(101), + /** * The request has succeeded. The information returned with the response is dependent on the method * used in the request, for example: @@ -44,6 +47,7 @@ public enum RestStatus { * */ OK(200), + /** * The request has been fulfilled and resulted in a new resource being created. The newly created resource can * be referenced by the URI(s) returned in the entity of the response, with the most specific URI for the @@ -57,6 +61,7 @@ public enum RestStatus { * for the requested variant just created, see section 14.19. */ CREATED(201), + /** * The request has been accepted for processing, but the processing has not been completed. The request might * or might not eventually be acted upon, as it might be disallowed when processing actually takes place. There @@ -69,6 +74,7 @@ public enum RestStatus { * monitor or some estimate of when the user can expect the request to be fulfilled. 
*/ ACCEPTED(202), + /** * The returned meta information in the entity-header is not the definitive set as available from the origin * server, but is gathered from a local or a third-party copy. The set presented MAY be a subset or super set @@ -77,6 +83,7 @@ public enum RestStatus { * is not required and is only appropriate when the response would otherwise be 200 (OK). */ NON_AUTHORITATIVE_INFORMATION(203), + /** * The server has fulfilled the request but does not need to return an entity-body, and might want to return * updated meta information. The response MAY include new or updated meta information in the form of @@ -91,6 +98,7 @@ public enum RestStatus { * line after the header fields. */ NO_CONTENT(204), + /** * The server has fulfilled the request and the user agent SHOULD reset the document view which caused the * request to be sent. This response is primarily intended to allow input for actions to take place via user @@ -98,6 +106,7 @@ public enum RestStatus { * another input action. The response MUST NOT include an entity. */ RESET_CONTENT(205), + /** * The server has fulfilled the partial GET request for the resource. The request MUST have included a Range * header field (section 14.35) indicating the desired range, and MAY have included an If-Range header @@ -127,6 +136,7 @@ public enum RestStatus { * A cache that does not support the Range and Content-Range headers MUST NOT cache 206 (Partial) responses. */ PARTIAL_CONTENT(206), + /** * The 207 (Multi-Status) status code provides status for multiple independent operations (see Section 13 for * more information). @@ -146,6 +156,7 @@ public enum RestStatus { * to identify the resource. 
*/ MULTI_STATUS(207), + /** * The requested resource corresponds to any one of a set of representations, each with its own specific * location, and agent-driven negotiation information (section 12) is being provided so that the user (or user @@ -162,6 +173,7 @@ public enum RestStatus { * This response is cacheable unless indicated otherwise. */ MULTIPLE_CHOICES(300), + /** * The requested resource has been assigned a new permanent URI and any future references to this resource * SHOULD use one of the returned URIs. Clients with link editing capabilities ought to automatically re-link @@ -176,6 +188,7 @@ public enum RestStatus { * the conditions under which the request was issued. */ MOVED_PERMANENTLY(301), + /** * The requested resource resides temporarily under a different URI. Since the redirection might be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only @@ -189,6 +202,7 @@ public enum RestStatus { * the conditions under which the request was issued. */ FOUND(302), + /** * The response to the request can be found under a different URI and SHOULD be retrieved using a GET method on * that resource. This method exists primarily to allow the output of a POST-activated script to redirect the @@ -200,6 +214,7 @@ public enum RestStatus { * HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s). */ SEE_OTHER(303), + /** * If the client has performed a conditional GET request and access is allowed, but the document has not been * modified, the server SHOULD respond with this status code. The 304 response MUST NOT contain a message-body, @@ -228,12 +243,14 @@ public enum RestStatus { * reflect any new field values given in the response. */ NOT_MODIFIED(304), + /** * The requested resource MUST be accessed through the proxy given by the Location field. The Location field * gives the URI of the proxy. 
The recipient is expected to repeat this single request via the proxy. * 305 responses MUST only be generated by origin servers. */ USE_PROXY(305), + /** * The requested resource resides temporarily under a different URI. Since the redirection MAY be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only @@ -249,11 +266,13 @@ public enum RestStatus { * conditions under which the request was issued. */ TEMPORARY_REDIRECT(307), + /** * The request could not be understood by the server due to malformed syntax. The client SHOULD NOT repeat the * request without modifications. */ BAD_REQUEST(400), + /** * The request requires user authentication. The response MUST include a WWW-Authenticate header field * (section 14.47) containing a challenge applicable to the requested resource. The client MAY repeat the request @@ -265,10 +284,12 @@ public enum RestStatus { * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ UNAUTHORIZED(401), + /** * This code is reserved for future use. */ PAYMENT_REQUIRED(402), + /** * The server understood the request, but is refusing to fulfill it. Authorization will not help and the request * SHOULD NOT be repeated. If the request method was not HEAD and the server wishes to make public why the @@ -277,6 +298,7 @@ public enum RestStatus { * instead. */ FORBIDDEN(403), + /** * The server has not found anything matching the Request-URI. No indication is given of whether the condition * is temporary or permanent. The 410 (Gone) status code SHOULD be used if the server knows, through some @@ -285,11 +307,13 @@ public enum RestStatus { * has been refused, or when no other response is applicable. */ NOT_FOUND(404), + /** * The method specified in the Request-Line is not allowed for the resource identified by the Request-URI. * The response MUST include an Allow header containing a list of valid methods for the requested resource. 
*/ METHOD_NOT_ALLOWED(405), + /** * The resource identified by the request is only capable of generating response entities which have content * characteristics not acceptable according to the accept headers sent in the request. @@ -308,6 +332,7 @@ public enum RestStatus { * the user for a decision on further actions. */ NOT_ACCEPTABLE(406), + /** * This code is similar to 401 (Unauthorized), but indicates that the client must first authenticate itself with * the proxy. The proxy MUST return a Proxy-Authenticate header field (section 14.33) containing a challenge @@ -316,11 +341,13 @@ public enum RestStatus { * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ PROXY_AUTHENTICATION(407), + /** * The client did not produce a request within the time that the server was prepared to wait. The client MAY * repeat the request without modifications at any later time. */ REQUEST_TIMEOUT(408), + /** * The request could not be completed due to a conflict with the current state of the resource. This code is * only allowed in situations where it is expected that the user might be able to resolve the conflict and @@ -335,6 +362,7 @@ public enum RestStatus { * a format defined by the response Content-Type. */ CONFLICT(409), + /** * The requested resource is no longer available at the server and no forwarding address is known. This condition * is expected to be considered permanent. Clients with link editing capabilities SHOULD delete references to @@ -350,11 +378,13 @@ public enum RestStatus { * owner. */ GONE(410), + /** * The server refuses to accept the request without a defined Content-Length. The client MAY repeat the request * if it adds a valid Content-Length header field containing the length of the message-body in the request message. */ LENGTH_REQUIRED(411), + /** * The precondition given in one or more of the request-header fields evaluated to false when it was tested on * the server. 
This response code allows the client to place preconditions on the current resource metainformation @@ -362,6 +392,7 @@ public enum RestStatus { * intended. */ PRECONDITION_FAILED(412), + /** * The server is refusing to process a request because the request entity is larger than the server is willing * or able to process. The server MAY close the connection to prevent the client from continuing the request. @@ -370,6 +401,7 @@ public enum RestStatus { * is temporary and after what time the client MAY try again. */ REQUEST_ENTITY_TOO_LARGE(413), + /** * The server is refusing to service the request because the Request-URI is longer than the server is willing * to interpret. This rare condition is only likely to occur when a client has improperly converted a POST @@ -379,11 +411,13 @@ public enum RestStatus { * buffers for reading or manipulating the Request-URI. */ REQUEST_URI_TOO_LONG(414), + /** * The server is refusing to service the request because the entity of the request is in a format not supported * by the requested resource for the requested method. */ UNSUPPORTED_MEDIA_TYPE(415), + /** * A server SHOULD return a response with this status code if a request included a Range request-header field * (section 14.35), and none of the range-specifier values in this field overlap the current extent of the @@ -396,12 +430,14 @@ public enum RestStatus { * response MUST NOT use the multipart/byteranges content-type. */ REQUESTED_RANGE_NOT_SATISFIED(416), + /** * The expectation given in an Expect request-header field (see section 14.20) could not be met by this server, * or, if the server is a proxy, the server has unambiguous evidence that the request could not be met by the * next-hop server. 
*/ EXPECTATION_FAILED(417), + /** * The 422 (Unprocessable Entity) status code means the server understands the content type of the request * entity (hence a 415(Unsupported Media Type) status code is inappropriate), and the syntax of the request @@ -410,37 +446,44 @@ public enum RestStatus { * well-formed (i.e., syntactically correct), but semantically erroneous, XML instructions. */ UNPROCESSABLE_ENTITY(422), + /** * The 423 (Locked) status code means the source or destination resource of a method is locked. This response * SHOULD contain an appropriate precondition or postcondition code, such as 'lock-token-submitted' or * 'no-conflicting-lock'. */ LOCKED(423), + /** * The 424 (Failed Dependency) status code means that the method could not be performed on the resource because * the requested action depended on another action and that action failed. For example, if a command in a * PROPPATCH method fails, then, at minimum, the rest of the commands will also fail with 424 (Failed Dependency). */ FAILED_DEPENDENCY(424), + /** * 429 Too Many Requests (RFC6585) */ TOO_MANY_REQUESTS(429), + /** * The server encountered an unexpected condition which prevented it from fulfilling the request. */ INTERNAL_SERVER_ERROR(500), + /** * The server does not support the functionality required to fulfill the request. This is the appropriate * response when the server does not recognize the request method and is not capable of supporting it for any * resource. */ NOT_IMPLEMENTED(501), + /** * The server, while acting as a gateway or proxy, received an invalid response from the upstream server it * accessed in attempting to fulfill the request. */ BAD_GATEWAY(502), + /** * The server is currently unable to handle the request due to a temporary overloading or maintenance of the * server. The implication is that this is a temporary condition which will be alleviated after some delay. 
@@ -448,12 +491,14 @@ public enum RestStatus { * the client SHOULD handle the response as it would for a 500 response. */ SERVICE_UNAVAILABLE(503), + /** * The server, while acting as a gateway or proxy, did not receive a timely response from the upstream server * specified by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server (e.g. DNS) it needed to access * in attempting to complete the request. */ GATEWAY_TIMEOUT(504), + /** * The server does not support, or refuses to support, the HTTP protocol version that was used in the request * message. The server is indicating that it is unable or unwilling to complete the request using the same major @@ -462,6 +507,7 @@ public enum RestStatus { * that server. */ HTTP_VERSION_NOT_SUPPORTED(505), + /** * The 507 (Insufficient Storage) status code means the method could not be performed on the resource because * the server is unable to store the representation needed to successfully complete the request. This condition From 60b7622de6c854ebe2cad13b8d5d7407f1901853 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 1 Dec 2023 10:29:26 -0800 Subject: [PATCH 159/263] Fix memory tracking in TopN.Row (#102831) This commit addresses the issue of missing memory tracking for the BitSet in TopN.Row. Instead of introducing BreakingBitSet, we replace the BitSet with a smaller array of offsets in this PR. Nik suggested to remove that BitSet, but I haven't looked into that option yet. 
Closes #100640 Closes #102683 Closes #102790 Closes #102784 --- docs/changelog/102831.yaml | 9 +++ .../compute/operator/topn/TopNOperator.java | 80 ++++++++++++++----- .../operator/topn/TopNOperatorTests.java | 5 +- .../compute/operator/topn/TopNRowTests.java | 17 +++- .../esql/qa/single_node/HeapAttackIT.java | 2 - 5 files changed, 84 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/102831.yaml diff --git a/docs/changelog/102831.yaml b/docs/changelog/102831.yaml new file mode 100644 index 0000000000000..fb99b0c7f732b --- /dev/null +++ b/docs/changelog/102831.yaml @@ -0,0 +1,9 @@ +pr: 102831 +summary: Fix memory tracking in TopN.Row +area: ES|QL +type: bug +issues: + - 100640 + - 102784 + - 102790 + - 102683 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java index 2ebc9c82c6d98..c3fc9fc68b60c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/TopNOperator.java @@ -25,7 +25,6 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.BitSet; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -51,8 +50,7 @@ public class TopNOperator implements Operator, Accountable { * multivalues) to reference each position in each block of the Page. */ static final class Row implements Accountable, Releasable { - private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Row.class) + RamUsageEstimator - .shallowSizeOfInstance(BitSet.class); + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(Row.class); /** * The sort key. 
@@ -64,7 +62,7 @@ static final class Row implements Accountable, Releasable { * For ex, if a Long is represented as 8 bytes, each of these bytes will have the same value (set/unset) if the respective Long * value is used for sorting ascending/descending. */ - final BitSet orderByCompositeKeyAscending = new BitSet(); + final BytesOrder bytesOrder; /** * Values to reconstruct the row. Sort of. When we reconstruct the row we read @@ -73,11 +71,12 @@ static final class Row implements Accountable, Releasable { */ final BreakingBytesRefBuilder values; - Row(CircuitBreaker breaker) { + Row(CircuitBreaker breaker, List sortOrders) { boolean success = false; try { keys = new BreakingBytesRefBuilder(breaker, "topn"); values = new BreakingBytesRefBuilder(breaker, "topn"); + bytesOrder = new BytesOrder(sortOrders, breaker, "topn"); success = true; } finally { if (success == false) { @@ -88,12 +87,54 @@ static final class Row implements Accountable, Releasable { @Override public long ramBytesUsed() { - return SHALLOW_SIZE + keys.ramBytesUsed() + orderByCompositeKeyAscending.size() / Byte.SIZE + values.ramBytesUsed(); + return SHALLOW_SIZE + keys.ramBytesUsed() + bytesOrder.ramBytesUsed() + values.ramBytesUsed(); } @Override public void close() { - Releasables.closeExpectNoException(keys, values); + Releasables.closeExpectNoException(keys, values, bytesOrder); + } + } + + static final class BytesOrder implements Releasable, Accountable { + private static final long BASE_RAM_USAGE = RamUsageEstimator.shallowSizeOfInstance(BytesOrder.class); + private final CircuitBreaker breaker; + final List sortOrders; + final int[] endOffsets; + + BytesOrder(List sortOrders, CircuitBreaker breaker, String label) { + this.breaker = breaker; + this.sortOrders = sortOrders; + breaker.addEstimateBytesAndMaybeBreak(memoryUsed(sortOrders.size()), label); + this.endOffsets = new int[sortOrders.size()]; + } + + /** + * Returns true if the byte at the given position is ordered ascending; otherwise, 
return false + */ + boolean isByteOrderAscending(int bytePosition) { + int index = Arrays.binarySearch(endOffsets, bytePosition); + if (index < 0) { + index = -1 - index; + } + return sortOrders.get(index).asc(); + } + + private long memoryUsed(int numKeys) { + // sortOrders is global and its memory is accounted at the top level TopNOperator + return BASE_RAM_USAGE + RamUsageEstimator.alignObjectSize( + (long) RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) Integer.BYTES * numKeys + ); + } + + @Override + public long ramBytesUsed() { + return memoryUsed(sortOrders.size()); + } + + @Override + public void close() { + breaker.addWithoutBreaking(-ramBytesUsed()); } } @@ -138,14 +179,11 @@ void row(int position, Row destination) { private void writeKey(int position, Row row) { int orderByCompositeKeyCurrentPosition = 0; - for (KeyFactory factory : keyFactories) { - int valueAsBytesSize = factory.extractor.writeKey(row.keys, position); - row.orderByCompositeKeyAscending.set( - orderByCompositeKeyCurrentPosition, - valueAsBytesSize + orderByCompositeKeyCurrentPosition, - factory.ascending - ); + for (int i = 0; i < keyFactories.length; i++) { + int valueAsBytesSize = keyFactories[i].extractor.writeKey(row.keys, position); + assert valueAsBytesSize > 0 : valueAsBytesSize; orderByCompositeKeyCurrentPosition += valueAsBytesSize; + row.bytesOrder.endOffsets[i] = orderByCompositeKeyCurrentPosition - 1; } } @@ -189,9 +227,7 @@ public record TopNOperatorFactory( List sortOrders, int maxPageSize ) implements OperatorFactory { - public TopNOperatorFactory - - { + public TopNOperatorFactory { for (ElementType e : elementTypes) { if (e == null) { throw new IllegalArgumentException("ElementType not known"); @@ -274,19 +310,20 @@ static int compareRows(Row r1, Row r2) { // the two rows are equal return 0; } + int length = Math.min(br1.length, br2.length); // one value is the prefix of the other if (mismatchedByteIndex == length) { // the value with the greater length is considered 
greater than the other if (length == br1.length) {// first row is less than the second row - return r2.orderByCompositeKeyAscending.get(length) ? 1 : -1; + return r2.bytesOrder.isByteOrderAscending(length) ? 1 : -1; } else {// second row is less than the first row - return r1.orderByCompositeKeyAscending.get(length) ? -1 : 1; + return r1.bytesOrder.isByteOrderAscending(length) ? -1 : 1; } } else { // compare the byte that mismatched accounting for that respective byte asc/desc ordering int c = Byte.compareUnsigned(br1.bytes[br1.offset + mismatchedByteIndex], br2.bytes[br2.offset + mismatchedByteIndex]); - return r1.orderByCompositeKeyAscending.get(mismatchedByteIndex) ? -c : c; + return r1.bytesOrder.isByteOrderAscending(mismatchedByteIndex) ? -c : c; } } @@ -312,10 +349,9 @@ public void addInput(Page page) { try { for (int i = 0; i < page.getPositionCount(); i++) { if (spare == null) { - spare = new Row(breaker); + spare = new Row(breaker, sortOrders); } else { spare.keys.clear(); - spare.orderByCompositeKeyAscending.clear(); spare.values.clear(); } rowFiller.row(i, spare); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index f43873b4fdfd9..be3e75fcce2a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -434,13 +434,14 @@ private TopNOperator.Row row( Page page, int position ) { + final var sortOrders = List.of(new TopNOperator.SortOrder(channel, asc, nullsFirst)); TopNOperator.RowFiller rf = new TopNOperator.RowFiller( IntStream.range(0, page.getBlockCount()).mapToObj(i -> elementType).toList(), IntStream.range(0, page.getBlockCount()).mapToObj(i -> encoder).toList(), - List.of(new TopNOperator.SortOrder(channel, 
asc, nullsFirst)), + sortOrders, page ); - TopNOperator.Row row = new TopNOperator.Row(nonBreakingBigArrays().breakerService().getBreaker("request")); + TopNOperator.Row row = new TopNOperator.Row(nonBreakingBigArrays().breakerService().getBreaker("request"), sortOrders); rf.row(position, row); return row; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java index 472b9e50767b1..9fb3a7644ca20 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNRowTests.java @@ -12,25 +12,27 @@ import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.test.ESTestCase; +import java.util.List; + import static org.hamcrest.Matchers.equalTo; public class TopNRowTests extends ESTestCase { private final CircuitBreaker breaker = new NoopCircuitBreaker(CircuitBreaker.REQUEST); public void testRamBytesUsedEmpty() { - TopNOperator.Row row = new TopNOperator.Row(breaker); + TopNOperator.Row row = new TopNOperator.Row(breaker, sortOrders()); assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } public void testRamBytesUsedSmall() { - TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST)); + TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST), sortOrders()); row.keys.append(randomByte()); row.values.append(randomByte()); assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } public void testRamBytesUsedBig() { - TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST)); + TopNOperator.Row row = new TopNOperator.Row(new NoopCircuitBreaker(CircuitBreaker.REQUEST), sortOrders()); for (int i = 0; i < 10000; i++) { 
row.keys.append(randomByte()); row.values.append(randomByte()); @@ -38,6 +40,13 @@ public void testRamBytesUsedBig() { assertThat(row.ramBytesUsed(), equalTo(expectedRamBytesUsed(row))); } + private static List sortOrders() { + return List.of( + new TopNOperator.SortOrder(randomNonNegativeInt(), randomBoolean(), randomBoolean()), + new TopNOperator.SortOrder(randomNonNegativeInt(), randomBoolean(), randomBoolean()) + ); + } + private long expectedRamBytesUsed(TopNOperator.Row row) { long expected = RamUsageTester.ramUsed(row); if (row.values.bytes().length == 0) { @@ -47,6 +56,8 @@ private long expectedRamBytesUsed(TopNOperator.Row row) { // The breaker is shared infrastructure so we don't count it but RamUsageTester does expected -= RamUsageTester.ramUsed(breaker); expected -= RamUsageTester.ramUsed("topn"); + // the sort orders are shared + expected -= RamUsageTester.ramUsed(sortOrders()); return expected; } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java index ffe122b8de222..31d0a7646e1b7 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java @@ -9,7 +9,6 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -47,7 +46,6 @@ * Tests that run ESQL queries that have, in the past, used so much memory they * crash Elasticsearch. 
*/ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102784") public class HeapAttackIT extends ESRestTestCase { /** * This used to fail, but we've since compacted top n so it actually succeeds now. From caec612feafca1450ae9e1800fbbb7298609986e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 1 Dec 2023 13:45:43 -0500 Subject: [PATCH 160/263] Make cosine similarity faster by storing magnitude and normalizing vectors (#99445) `cosine` is our default similarity and should provide a good experience on speed. `dot_product` is faster than `cosine` as it doesn't require calculating vector magnitudes in the similarity comparison loop. Instead, it can assume vectors have a length of `1` and use an optimized `dot_product` calculation. However, `cosine` as it exists today accepts vectors of any magnitude and cannot take advantage of this. This commit addresses this by: - Normalizing all vectors passed when indexing via `cosine` - Storing the calculated magnitude in an additional field (only if its `!= 1`). - Using the `dot_product` Lucene calculation - Normalizing query vectors when used against these new `cosine` fields - De-normalizing vectors when accessed via scripts - Allowing scripts to access these stored magnitudes. 
--- docs/changelog/99445.yaml | 5 + .../test/painless/140_dense_vector_basic.yml | 84 ++++++ .../test/mixed_cluster/30_vector_search.yml | 14 +- .../test/old_cluster/30_vector_search.yml | 28 +- .../upgraded_cluster/30_vector_search.yml | 29 +- .../search.vectors/40_knn_search_cosine.yml | 253 ++++++++++++++++++ .../elasticsearch/index/IndexVersions.java | 1 + .../DenormalizedCosineFloatVectorValues.java | 106 ++++++++ .../vectors/DenseVectorFieldMapper.java | 172 +++++++++--- .../mapper/vectors/VectorIndexFieldData.java | 17 +- .../script/field/vectors/KnnDenseVector.java | 12 +- .../vectors/KnnDenseVectorDocValuesField.java | 7 + ...ormalizedCosineFloatVectorValuesTests.java | 114 ++++++++ .../vectors/DenseVectorFieldMapperTests.java | 103 ++++++- .../KnnDenseVectorScriptDocValuesTests.java | 7 +- 15 files changed, 878 insertions(+), 74 deletions(-) create mode 100644 docs/changelog/99445.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java diff --git a/docs/changelog/99445.yaml b/docs/changelog/99445.yaml new file mode 100644 index 0000000000000..deea5fbf2423c --- /dev/null +++ b/docs/changelog/99445.yaml @@ -0,0 +1,5 @@ +pr: 99445 +summary: Make cosine similarity faster by storing magnitude and normalizing vectors +area: Vector Search +type: enhancement +issues: [] diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml index 291f014662995..a4245621f83e0 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml +++ 
b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/140_dense_vector_basic.yml @@ -135,3 +135,87 @@ setup: - match: {hits.hits.2._id: "1"} - gte: {hits.hits.2._score: 0.78} - lte: {hits.hits.2._score: 0.791} + +--- +"L2 similarity with indexed cosine similarity vector": + - skip: + features: close_to + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "l2norm(params.query_vector, 'indexed_vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 301.36, error: 0.01}} + + - match: {hits.hits.1._id: "2"} + - close_to: {hits.hits.1._score: {value: 11.34, error: 0.01}} + + - match: {hits.hits.2._id: "3"} + - close_to: {hits.hits.2._score: {value: 0.01, error: 0.01}} +--- +"L1 similarity with indexed cosine similarity vector": + - skip: + features: close_to + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "l1norm(params.query_vector, 'indexed_vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 485.18, error: 0.01}} + + - match: {hits.hits.1._id: "2"} + - close_to: {hits.hits.1._score: {value: 12.30, error: 0.01}} + + - match: {hits.hits.2._id: "3"} + - close_to: {hits.hits.2._score: {value: 0.01, error: 0.01}} +--- +"Test vector magnitude equality": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "doc['vector'].magnitude" + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "1"} + - close_to: 
{hits.hits.0._score: {value: 429.6021, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 192.6447, error: 0.01}} + + - match: {hits.hits.2._id: "2"} + - close_to: {hits.hits.2._score: {value: 186.34454, error: 0.01}} diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml index 969c4428c7c6c..108f58b29bf27 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/mixed_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Search float indices created in old cluster": + - skip: + features: close_to - do: search: index: test-float-index @@ -15,11 +17,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -36,11 +38,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: index: test-float-index diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml 
b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml index b471fa56a47a5..4aca71fe48f4a 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Create indexed float vectors and search": + - skip: + features: close_to - do: indices.create: index: test-float-index @@ -56,11 +58,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -77,11 +79,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: @@ -114,6 +116,8 @@ --- "Create indexed byte vectors and search": + - skip: + features: close_to - do: indices.create: index: test-byte-index @@ -172,11 +176,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { 
value: 15.0, error: 0.00001 } } - do: search: @@ -193,11 +197,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml index 60304a0078acc..ee2c357594b94 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/upgraded_cluster/30_vector_search.yml @@ -1,5 +1,7 @@ --- "Search float indices created in old cluster": + - skip: + features: close_to - do: search: index: test-float-index @@ -15,11 +17,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -36,11 +38,11 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - 
close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } - do: search: @@ -72,6 +74,8 @@ --- "Search byte indices created in old cluster": + - skip: + features: close_to - do: search: index: test-byte-index @@ -87,11 +91,11 @@ field: bdv - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "2" } - - match: { hits.hits.1._score: 21 } + - close_to: { hits.hits.1._score: { value: 21.0, error: 0.00001 } } - match: { hits.hits.2._id: "1" } - - match: { hits.hits.2._score: 15 } + - close_to: { hits.hits.2._score: { value: 15.0, error: 0.00001 } } - do: search: @@ -108,11 +112,12 @@ field: knn - match: { hits.hits.0._id: "3" } - - match: { hits.hits.0._score: 27 } + - close_to: { hits.hits.0._score: { value: 27.0, error: 0.00001 } } - match: { hits.hits.1._id: "5" } - - match: { hits.hits.1._score: 25 } + - close_to: { hits.hits.1._score: { value: 25.0, error: 0.00001 } } - match: { hits.hits.2._id: "7" } - - match: { hits.hits.2._score: 23 } + - close_to: { hits.hits.2._score: { value: 23.0, error: 0.00001 } } + - do: search: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml new file mode 100644 index 0000000000000..8faad25f0037d --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -0,0 +1,253 @@ +setup: + - skip: + version: ' - 7.99.99' + reason: 'kNN search added in 8.0' + - do: + indices.create: + index: test + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: true + similarity: cosine + normalized_vector: + type: dense_vector + dims: 5 + index: true + similarity: cosine + end_normalized: + type: dense_vector + dims: 5 + index: true + similarity: cosine + first_normalized: + type: 
dense_vector + dims: 5 + index: true + similarity: cosine + middle_normalized: + type: dense_vector + dims: 5 + index: true + similarity: cosine + + + - do: + index: + index: test + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + middle_normalized: [230.0, 300.33, -34.8988, 15.555, -200.0] + normalized_vector: [0.5353791, 0.6990887, -0.08123516, 0.03620792, -0.46554706] + end_normalized: [230.0, 300.33, -34.8988, 15.555, -200.0] + first_normalized: [0.5353791, 0.6990887, -0.08123516, 0.03620792, -0.46554706] + + - do: + index: + index: test + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + first_normalized: [-0.5, 100.0, -13, 14.8, -156.0] + normalized_vector: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + middle_normalized: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + end_normalized: [-0.0026832016, 0.53664035, -0.06976324, 0.07942277, -0.8371589] + + - do: + index: + index: test + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + first_normalized: [0.5, 111.3, -13.0, 14.8, -156.0] + middle_normalized: [0.5, 111.3, -13.0, 14.8, -156.0] + normalized_vector: [0.0025954517, 0.5777475, -0.06748174, 0.076825365, -0.8097809] + end_normalized: [0.0025954517, 0.5777475, -0.06748174, 0.076825365, -0.8097809] + + - do: + indices.refresh: {} + +--- +"kNN search only regular query": + - skip: + version: ' - 8.3.99' + reason: 'kNN added to search endpoint in 8.4' + features: close_to + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: 
normalized_vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: first_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: middle_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: end_normalized + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + +# With a normalized query vector, all should be the same + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: 
normalized_vector + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: first_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: middle_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: end_normalized + query_vector: [-0.0027626718, 0.4972809, -0.055253435, 0.081775084, -0.86195356] + k: 3 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - close_to: {hits.hits.0._score: {value: 0.999405, error: 0.0001}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 0.9976501, error: 0.0001}} diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index b6bebcf6abb12..75ee272e7effe 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -90,6 +90,7 @@ private static IndexVersion def(int id, Version luceneVersion) { 
public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); + public static final IndexVersion NORMALIZED_VECTOR_COSINE = def(8_500_005, Version.LUCENE_9_8_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java new file mode 100644 index 0000000000000..1bffbb4fd6c3d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.NumericDocValues; + +import java.io.IOException; + +/** + * Provides the denormalized vectors. Float vectors stored with cosine similarity are normalized by default. So when reading the value + * for scripts, we to denormalize them. 
+ */ +public class DenormalizedCosineFloatVectorValues extends FloatVectorValues { + + private final FloatVectorValues in; + private final NumericDocValues magnitudeIn; + private final float[] vector; + private float magnitude = 1f; + private boolean hasMagnitude; + private int docId = -1; + + public DenormalizedCosineFloatVectorValues(FloatVectorValues in, NumericDocValues magnitudeIn) { + this.in = in; + this.magnitudeIn = magnitudeIn; + this.vector = new float[in.dimension()]; + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + @Override + public float[] vectorValue() throws IOException { + // Lazy load vectors as we may iterate but not actually require the vector + return vectorValue(in.docID()); + } + + @Override + public int docID() { + return in.docID(); + } + + @Override + public int nextDoc() throws IOException { + return in.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } + + public float magnitude() { + return magnitude; + } + + private float[] vectorValue(int docId) throws IOException { + if (docId != this.docId) { + this.docId = docId; + hasMagnitude = decodedMagnitude(docId); + // We should only copy and transform if we have a stored a non-unit length magnitude + if (hasMagnitude) { + System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + for (int i = 0; i < vector.length; i++) { + vector[i] *= magnitude; + } + return vector; + } else { + return in.vectorValue(); + } + } else { + return hasMagnitude ? 
vector : in.vectorValue(); + } + } + + private boolean decodedMagnitude(int docId) throws IOException { + if (magnitudeIn == null) { + return false; + } + int currentDoc = magnitudeIn.docID(); + if (docId == currentDoc) { + return true; + } else { + if (magnitudeIn.advanceExact(docId)) { + magnitude = Float.intBitsToFloat((int) magnitudeIn.longValue()); + return true; + } else { + magnitude = 1f; + return false; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 6aaea1dd32285..423f5d81ebbd3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -15,12 +15,15 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.FloatDocValuesField; import org.apache.lucene.document.KnnByteVectorField; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorEncoding; @@ -63,6 +66,7 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.time.ZoneId; +import java.util.Arrays; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -76,9 +80,16 @@ * A {@link FieldMapper} for indexing a dense vector of floats. 
*/ public class DenseVectorFieldMapper extends FieldMapper { + public static final String COSINE_MAGNITUDE_FIELD_SUFFIX = "._magnitude"; + private static final float EPS = 1e-4f; + + static boolean isNotUnitVector(float magnitude) { + return Math.abs(magnitude - 1.0f) > EPS; + } public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; + public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; @@ -242,7 +253,8 @@ IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldTyp denseVectorFieldType.indexVersionCreated, this, denseVectorFieldType.dims, - denseVectorFieldType.indexed + denseVectorFieldType.indexed, + r -> r ); } @@ -311,7 +323,7 @@ void checkVectorMagnitude( } @Override - public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { + public void parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { int index = 0; byte[] vector = new byte[fieldMapper.fieldType().dims]; float squaredMagnitude = 0; @@ -356,7 +368,12 @@ public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMappe } fieldMapper.checkDimensionMatches(index, context); checkVectorMagnitude(fieldMapper.fieldType().similarity, errorByteElementsAppender(vector), squaredMagnitude); - return createKnnVectorField(fieldMapper.fieldType().name(), vector, fieldMapper.fieldType().similarity.function); + Field field = createKnnVectorField( + fieldMapper.fieldType().name(), + vector, + fieldMapper.fieldType().similarity.vectorSimilarityFunction(fieldMapper.indexCreatedVersion, this) + ); + 
context.doc().addWithKey(fieldMapper.fieldType().name(), field); } @Override @@ -438,7 +455,32 @@ IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldTyp denseVectorFieldType.indexVersionCreated, this, denseVectorFieldType.dims, - denseVectorFieldType.indexed + denseVectorFieldType.indexed, + denseVectorFieldType.indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && denseVectorFieldType.indexed + && denseVectorFieldType.similarity.equals(VectorSimilarity.COSINE) ? r -> new FilterLeafReader(r) { + @Override + public CacheHelper getCoreCacheHelper() { + return r.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return r.getReaderCacheHelper(); + } + + @Override + public FloatVectorValues getFloatVectorValues(String fieldName) throws IOException { + FloatVectorValues values = in.getFloatVectorValues(fieldName); + if (values == null) { + return null; + } + return new DenormalizedCosineFloatVectorValues( + values, + in.getNumericDocValues(fieldName + COSINE_MAGNITUDE_FIELD_SUFFIX) + ); + } + } : r -> r ); } @@ -464,7 +506,7 @@ void checkVectorMagnitude( throw new IllegalArgumentException(appender.apply(errorBuilder).toString()); } - if (similarity == VectorSimilarity.DOT_PRODUCT && Math.abs(squaredMagnitude - 1.0f) > 1e-4f) { + if (similarity == VectorSimilarity.DOT_PRODUCT && isNotUnitVector(squaredMagnitude)) { errorBuilder = new StringBuilder( "The [" + VectorSimilarity.DOT_PRODUCT + "] similarity can only be used with unit-length vectors." 
); @@ -480,7 +522,7 @@ void checkVectorMagnitude( } @Override - public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { + public void parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException { int index = 0; float[] vector = new float[fieldMapper.fieldType().dims]; float squaredMagnitude = 0; @@ -495,7 +537,23 @@ public Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMappe fieldMapper.checkDimensionMatches(index, context); checkVectorBounds(vector); checkVectorMagnitude(fieldMapper.fieldType().similarity, errorFloatElementsAppender(vector), squaredMagnitude); - return createKnnVectorField(fieldMapper.fieldType().name(), vector, fieldMapper.fieldType().similarity.function); + if (fieldMapper.indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) + && fieldMapper.fieldType().similarity.equals(VectorSimilarity.COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + for (int i = 0; i < vector.length; i++) { + vector[i] /= length; + } + final String fieldName = fieldMapper.fieldType().name() + COSINE_MAGNITUDE_FIELD_SUFFIX; + Field magnitudeField = new FloatDocValuesField(fieldName, length); + context.doc().addWithKey(fieldName, magnitudeField); + } + Field field = createKnnVectorField( + fieldMapper.fieldType().name(), + vector, + fieldMapper.fieldType().similarity.vectorSimilarityFunction(fieldMapper.indexCreatedVersion, this) + ); + context.doc().addWithKey(fieldMapper.fieldType().name(), field); } @Override @@ -542,7 +600,7 @@ ByteBuffer createByteBuffer(IndexVersion indexVersion, int numBytes) { abstract IndexFieldData.Builder fielddataBuilder(DenseVectorFieldType denseVectorFieldType, FieldDataContext fieldDataContext); - abstract Field parseKnnVector(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException; + abstract void 
parseKnnVectorAndIndex(DocumentParserContext context, DenseVectorFieldMapper fieldMapper) throws IOException; abstract double parseKnnVectorToByteBuffer(DocumentParserContext context, DenseVectorFieldMapper fieldMapper, ByteBuffer byteBuffer) throws IOException; @@ -646,23 +704,35 @@ static Function errorByteElementsAppender(byte[] v ); enum VectorSimilarity { - L2_NORM(VectorSimilarityFunction.EUCLIDEAN) { + L2_NORM { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> 1f / (1f + similarity * similarity); }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.EUCLIDEAN; + } }, - COSINE(VectorSimilarityFunction.COSINE) { + COSINE { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> (1 + similarity) / 2f; }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return indexVersion.onOrAfter(NORMALIZE_COSINE) && ElementType.FLOAT.equals(elementType) + ? 
VectorSimilarityFunction.DOT_PRODUCT + : VectorSimilarityFunction.COSINE; + } }, - DOT_PRODUCT(VectorSimilarityFunction.DOT_PRODUCT) { + DOT_PRODUCT { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { @@ -670,21 +740,25 @@ float score(float similarity, ElementType elementType, int dim) { case FLOAT -> (1 + similarity) / 2f; }; } + + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.DOT_PRODUCT; + } }, - MAX_INNER_PRODUCT(VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT) { + MAX_INNER_PRODUCT { @Override float score(float similarity, ElementType elementType, int dim) { return switch (elementType) { case BYTE, FLOAT -> similarity < 0 ? 1 / (1 + -1 * similarity) : similarity + 1; }; } - }; - - public final VectorSimilarityFunction function; - VectorSimilarity(VectorSimilarityFunction function) { - this.function = function; - } + @Override + public VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType) { + return VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; + } + }; @Override public final String toString() { @@ -692,6 +766,8 @@ public final String toString() { } abstract float score(float similarity, ElementType elementType, int dim); + + public abstract VectorSimilarityFunction vectorSimilarityFunction(IndexVersion indexVersion, ElementType elementType); } private abstract static class IndexOptions implements ToXContent { @@ -892,11 +968,19 @@ public Query createKnnQuery( } elementType.checkVectorBounds(queryVector); - if (similarity == VectorSimilarity.DOT_PRODUCT - || similarity == VectorSimilarity.COSINE - || similarity == VectorSimilarity.MAX_INNER_PRODUCT) { + if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); 
elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); + if (similarity == VectorSimilarity.COSINE + && ElementType.FLOAT.equals(elementType) + && indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + queryVector = Arrays.copyOf(queryVector, queryVector.length); + for (int i = 0; i < queryVector.length; i++) { + queryVector[i] /= length; + } + } } Query knnQuery = switch (elementType) { case BYTE -> { @@ -983,19 +1067,22 @@ public void parse(DocumentParserContext context) throws IOException { context.addDynamicMapper(name(), update); return; } - Field field = fieldType().indexed ? parseKnnVector(context) : parseBinaryDocValuesVector(context); - context.doc().addWithKey(fieldType().name(), field); + if (fieldType().indexed) { + parseKnnVectorAndIndex(context); + } else { + parseBinaryDocValuesVectorAndIndex(context); + } } - private Field parseKnnVector(DocumentParserContext context) throws IOException { - return fieldType().elementType.parseKnnVector(context, this); + private void parseKnnVectorAndIndex(DocumentParserContext context) throws IOException { + fieldType().elementType.parseKnnVectorAndIndex(context, this); } - private Field parseBinaryDocValuesVector(DocumentParserContext context) throws IOException { - int dims = fieldType().dims; - ElementType elementType = fieldType().elementType; + private void parseBinaryDocValuesVectorAndIndex(DocumentParserContext context) throws IOException { // encode array of floats as array of integers and store into buf // this code is here and not in the VectorEncoderDecoder so not to create extra arrays + int dims = fieldType().dims; + ElementType elementType = fieldType().elementType; int numBytes = indexCreatedVersion.onOrAfter(MAGNITUDE_STORED_INDEX_VERSION) ? 
dims * elementType.elementBytes + MAGNITUDE_BYTES : dims * elementType.elementBytes; @@ -1007,7 +1094,8 @@ private Field parseBinaryDocValuesVector(DocumentParserContext context) throws I float vectorMagnitude = (float) Math.sqrt(dotProduct); byteBuffer.putFloat(vectorMagnitude); } - return new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); + Field field = new BinaryDocValuesField(fieldType().name(), new BytesRef(byteBuffer.array())); + context.doc().addWithKey(fieldType().name(), field); } private void checkDimensionExceeded(int index, DocumentParserContext context) { @@ -1120,7 +1208,7 @@ public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { ); } if (fieldType().indexed) { - return new IndexedSyntheticFieldLoader(); + return new IndexedSyntheticFieldLoader(indexCreatedVersion, fieldType().similarity); } return new DocValuesSyntheticFieldLoader(indexCreatedVersion); } @@ -1129,6 +1217,16 @@ private class IndexedSyntheticFieldLoader implements SourceLoader.SyntheticField private FloatVectorValues values; private ByteVectorValues byteVectorValues; private boolean hasValue; + private boolean hasMagnitude; + + private final IndexVersion indexCreatedVersion; + private final VectorSimilarity vectorSimilarity; + private NumericDocValues magnitudeReader; + + private IndexedSyntheticFieldLoader(IndexVersion indexCreatedVersion, VectorSimilarity vectorSimilarity) { + this.indexCreatedVersion = indexCreatedVersion; + this.vectorSimilarity = vectorSimilarity; + } @Override public Stream> storedFieldLoaders() { @@ -1139,8 +1237,12 @@ public Stream> storedFieldLoaders() { public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { values = leafReader.getFloatVectorValues(name()); if (values != null) { + if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { + magnitudeReader = leafReader.getNumericDocValues(name() + 
COSINE_MAGNITUDE_FIELD_SUFFIX); + } return docId -> { hasValue = docId == values.advance(docId); + hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); return hasValue; }; } @@ -1164,10 +1266,18 @@ public void write(XContentBuilder b) throws IOException { if (false == hasValue) { return; } + float magnitude = Float.NaN; + if (hasMagnitude) { + magnitude = Float.intBitsToFloat((int) magnitudeReader.longValue()); + } b.startArray(simpleName()); if (values != null) { for (float v : values.vectorValue()) { - b.value(v); + if (hasMagnitude) { + b.value(v * magnitude); + } else { + b.value(v); + } } } else if (byteVectorValues != null) { byte[] vectorValue = byteVectorValues.vectorValue(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java index 6ebfd1f57088b..3be341c54c7da 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorIndexFieldData.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; import org.elasticsearch.common.util.BigArrays; @@ -24,6 +25,8 @@ import org.elasticsearch.search.sort.BucketedSort; import org.elasticsearch.search.sort.SortOrder; +import java.util.function.Function; + public class VectorIndexFieldData implements IndexFieldData { protected final String fieldName; @@ -32,6 +35,7 @@ public class VectorIndexFieldData implements IndexFieldData readerWrapper; public VectorIndexFieldData( String fieldName, @@ -39,7 +43,8 @@ public VectorIndexFieldData( IndexVersion indexVersion, ElementType elementType, int dims, - boolean indexed + boolean indexed, + Function readerWrapper ) { this.fieldName = fieldName; 
this.valuesSourceType = valuesSourceType; @@ -47,6 +52,7 @@ public VectorIndexFieldData( this.elementType = elementType; this.dims = dims; this.indexed = indexed; + this.readerWrapper = readerWrapper; } @Override @@ -82,7 +88,7 @@ public BucketedSort newBucketedSort( @Override public VectorDVLeafFieldData load(LeafReaderContext context) { - return new VectorDVLeafFieldData(context.reader(), fieldName, indexVersion, elementType, dims, indexed); + return new VectorDVLeafFieldData(readerWrapper.apply(context.reader()), fieldName, indexVersion, elementType, dims, indexed); } @Override @@ -97,6 +103,7 @@ public static class Builder implements IndexFieldData.Builder { private final ElementType elementType; private final int dims; private final boolean indexed; + private final Function readerWrapper; public Builder( String name, @@ -104,7 +111,8 @@ public Builder( IndexVersion indexVersion, ElementType elementType, int dims, - boolean indexed + boolean indexed, + Function readerWrapper ) { this.name = name; this.valuesSourceType = valuesSourceType; @@ -112,11 +120,12 @@ public Builder( this.elementType = elementType; this.dims = dims; this.indexed = indexed; + this.readerWrapper = readerWrapper; } @Override public IndexFieldData build(IndexFieldDataCache cache, CircuitBreakerService breakerService) { - return new VectorIndexFieldData(name, valuesSourceType, indexVersion, elementType, dims, indexed); + return new VectorIndexFieldData(name, valuesSourceType, indexVersion, elementType, dims, indexed, readerWrapper); } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java index 9edcd97df4747..1605f179e36aa 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVector.java @@ -16,9 +16,16 @@ public class KnnDenseVector implements DenseVector { 
protected final float[] docVector; + private float magnitude; public KnnDenseVector(float[] docVector) { this.docVector = docVector; + this.magnitude = Float.NaN; + } + + public KnnDenseVector(float[] docVector, float magnitude) { + this.docVector = docVector; + this.magnitude = magnitude; } @Override @@ -30,7 +37,10 @@ public float[] getVector() { @Override public float getMagnitude() { - return DenseVector.getMagnitude(docVector); + if (Float.isNaN(magnitude)) { + magnitude = DenseVector.getMagnitude(docVector); + } + return magnitude; } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index b10d83cbdb52b..f5d637dc063b4 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -63,6 +64,9 @@ public DenseVector get() { return DenseVector.EMPTY; } + if (input instanceof DenormalizedCosineFloatVectorValues normalized) { + return new KnnDenseVector(vector, normalized.magnitude()); + } return new KnnDenseVector(vector); } @@ -72,6 +76,9 @@ public DenseVector get(DenseVector defaultValue) { return defaultValue; } + if (input instanceof DenormalizedCosineFloatVectorValues normalized) { + return new KnnDenseVector(vector, normalized.magnitude()); + } return new KnnDenseVector(vector); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java new file mode 100644 index 0000000000000..c158dcccd41d0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper.vectors; + +import org.apache.lucene.index.NumericDocValues; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.elasticsearch.index.mapper.vectors.KnnDenseVectorScriptDocValuesTests.wrap; + +public class DenormalizedCosineFloatVectorValuesTests extends ESTestCase { + + public void testEmptyVectors() throws IOException { + DenormalizedCosineFloatVectorValues normalizedCosineFloatVectorValues = new DenormalizedCosineFloatVectorValues( + wrap(new float[0][0]), + wrapMagnitudes(new float[0]) + ); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + } + + public void testRandomVectors() throws IOException { + int dims = randomIntBetween(64, 2048); + int numVectors = randomIntBetween(1, 24); + float[][] vectors = new float[numVectors][]; + float[][] normalizedVectors = new float[numVectors][]; + float[] magnitudes = new float[numVectors]; + for (int i = 0; i < numVectors; i++) { + float[] vector = new float[dims]; + float mag = randomVector(vector); + magnitudes[i] = mag; + vectors[i] = vector; + normalizedVectors[i] = copyAndNormalize(vector, mag); + } + + DenormalizedCosineFloatVectorValues 
normalizedCosineFloatVectorValues = new DenormalizedCosineFloatVectorValues( + wrap(normalizedVectors), + wrapMagnitudes(magnitudes) + ); + + for (int i = 0; i < numVectors; i++) { + assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); + } + + } + + public static float[] copyAndNormalize(float[] in, float mag) { + float[] copy = Arrays.copyOf(in, in.length); + for (int i = 0; i < copy.length; i++) { + copy[i] = copy[i] / mag; + } + return copy; + } + + private static float randomVector(float[] in) { + float magnitude = 0f; + for (int i = 0; i < in.length; i++) { + float v = randomFloat() * randomIntBetween(1, 5); + in[i] = v; + magnitude += v * v; + } + return (float) Math.sqrt(magnitude); + } + + public static NumericDocValues wrapMagnitudes(float[] magnitudes) { + return new NumericDocValues() { + int index = -1; + + @Override + public long longValue() throws IOException { + return Float.floatToRawIntBits(magnitudes[index]); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return advance(target) != NO_MORE_DOCS; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + return advance(index + 1); + } + + @Override + public int advance(int target) { + if (target >= magnitudes.length) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return magnitudes.length; + } + }; + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index b10d756a6e458..0b3b4fae82324 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -17,9 +17,11 @@ import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentHelper; @@ -44,6 +46,7 @@ import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceProvider; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; @@ -280,7 +283,10 @@ private void testIndexedVector(VectorSimilarity similarity, DocumentMapper mappe KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.FLOAT), + vectorField.fieldType().vectorSimilarityFunction() + ); } public void testNonIndexedVector() throws Exception { @@ -333,7 +339,10 @@ public void testIndexedByteVector() throws Exception { new byte[] { (byte) -1, (byte) 1, (byte) 127 }, vectorField.vectorValue() ); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE), + vectorField.fieldType().vectorSimilarityFunction() + ); } public void testDotProductWithInvalidNorm() throws 
Exception { @@ -571,7 +580,7 @@ public void testDefaultParamsBeforeIndexByDefault() throws Exception { assertNull(denseVectorFieldType.getSimilarity()); } - public void testtParamsBeforeIndexByDefault() throws Exception { + public void testParamsBeforeIndexByDefault() throws Exception { DocumentMapper documentMapper = createDocumentMapper(INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION, fieldMapping(b -> { b.field("type", "dense_vector").field("dims", 3).field("index", true).field("similarity", "dot_product"); })); @@ -651,6 +660,48 @@ public void testDocumentsWithIncorrectDims() throws Exception { } } + public void testCosineDenseVectorValues() throws IOException { + final int dims = randomIntBetween(64, 2048); + VectorSimilarity similarity = VectorSimilarity.COSINE; + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", "dense_vector").field("dims", dims).field("index", true).field("similarity", similarity)) + ); + float[] vector = new float[dims]; + for (int i = 0; i < dims; i++) { + vector[i] = randomFloat() * randomIntBetween(1, 10); + } + ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", vector))); + List fields = doc1.rootDoc().getFields("field"); + + assertEquals(1, fields.size()); + assertThat(fields.get(0), instanceOf(KnnFloatVectorField.class)); + KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); + // Cosine vectors are now normalized + VectorUtil.l2normalize(vector); + assertArrayEquals("Parsed vector is not equal to normalized original.", vector, vectorField.vectorValue(), 0.001f); + } + + public void testCosineDenseVectorValuesOlderIndexVersions() throws IOException { + final int dims = randomIntBetween(64, 2048); + VectorSimilarity similarity = VectorSimilarity.COSINE; + DocumentMapper mapper = createDocumentMapper( + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersions.NEW_SPARSE_VECTOR), + fieldMapping(b -> b.field("type", "dense_vector").field("dims", 
dims).field("index", true).field("similarity", similarity)) + ); + float[] vector = new float[dims]; + for (int i = 0; i < dims; i++) { + vector[i] = randomFloat() * randomIntBetween(1, 10); + } + ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", vector))); + List fields = doc1.rootDoc().getFields("field"); + + assertEquals(1, fields.size()); + assertThat(fields.get(0), instanceOf(KnnFloatVectorField.class)); + KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); + // Cosine vectors are now normalized + assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); + } + /** * Test that max dimensions limit for float dense_vector field * is 4096 as defined by {@link DenseVectorFieldMapper#MAX_DIMS_COUNT} @@ -674,7 +725,9 @@ public void testMaxDimsFloatVector() throws IOException { KnnFloatVectorField vectorField = (KnnFloatVectorField) fields.get(0); assertEquals(dims, vectorField.fieldType().vectorDimension()); assertEquals(VectorEncoding.FLOAT32, vectorField.fieldType().vectorEncoding()); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals(VectorSimilarityFunction.DOT_PRODUCT, vectorField.fieldType().vectorSimilarityFunction()); + // Cosine vectors are now normalized + VectorUtil.l2normalize(vector); assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue(), 0.001f); } @@ -708,10 +761,50 @@ public void testMaxDimsByteVector() throws IOException { KnnByteVectorField vectorField = (KnnByteVectorField) fields.get(0); assertEquals(dims, vectorField.fieldType().vectorDimension()); assertEquals(VectorEncoding.BYTE, vectorField.fieldType().vectorEncoding()); - assertEquals(similarity.function, vectorField.fieldType().vectorSimilarityFunction()); + assertEquals( + similarity.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE), + vectorField.fieldType().vectorSimilarityFunction() + ); 
assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } + public void testVectorSimilarity() { + assertEquals( + VectorSimilarityFunction.COSINE, + VectorSimilarity.COSINE.vectorSimilarityFunction(IndexVersion.current(), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.COSINE, + VectorSimilarity.COSINE.vectorSimilarityFunction( + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, DenseVectorFieldMapper.NORMALIZE_COSINE), + ElementType.FLOAT + ) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.COSINE.vectorSimilarityFunction( + IndexVersionUtils.randomVersionBetween(random(), DenseVectorFieldMapper.NORMALIZE_COSINE, IndexVersion.current()), + ElementType.FLOAT + ) + ); + assertEquals( + VectorSimilarityFunction.EUCLIDEAN, + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.EUCLIDEAN, + VectorSimilarity.L2_NORM.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.BYTE) + ); + assertEquals( + VectorSimilarityFunction.DOT_PRODUCT, + VectorSimilarity.DOT_PRODUCT.vectorSimilarityFunction(IndexVersionUtils.randomVersion(random()), ElementType.FLOAT) + ); + } + @Override protected void assertFetchMany(MapperService mapperService, String field, Object value, String format, int count) throws IOException { assumeFalse("Dense vectors currently don't support multiple values in the same field", false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index ce71236b3524f..81fdf7d7bec24 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -234,12 +234,13 @@ public int advance(int target) { } public static FloatVectorValues wrap(float[][] vectors) { + int dim = vectors.length > 0 ? vectors[0].length : 0; return new FloatVectorValues() { - int index = 0; + int index = -1; @Override public int dimension() { - return 0; + return dim; } @Override @@ -259,7 +260,7 @@ public int docID() { @Override public int nextDoc() { - throw new UnsupportedOperationException(); + return advance(index + 1); } @Override From c4e369dafe96c1cab5122a466005b3f413edd9b2 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Fri, 1 Dec 2023 14:35:49 -0500 Subject: [PATCH 161/263] [ML] Implementing Hugging Face text embedding service (#102730) * Pulling in main * Refactoring and adding hugging face text embedding * Adding a few missing items and reminders * Adding error handling for hugging face * Fixing some issues with hugging face * Fixing tests * Fixing spotless * Adding support for array of arrays response format * More comments * Addressing feedback * Adding more tests and fixing errors * Adding null check and tests --------- Co-authored-by: Elastic Machine --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/src/main/java/module-info.java | 2 + .../InferenceNamedWriteablesProvider.java | 7 +- .../xpack/inference/InferencePlugin.java | 2 + .../xpack/inference/UnparsedModel.java | 35 -- .../action/huggingface/HuggingFaceAction.java | 78 +++ .../huggingface/HuggingFaceActionCreator.java | 49 ++ .../huggingface/HuggingFaceActionVisitor.java | 18 + .../huggingface/HuggingFaceElserAction.java | 53 -- .../http/retry/BaseResponseHandler.java | 90 +++ .../external/http/retry/ErrorMessage.java | 12 + .../external/http/retry/RetryException.java | 6 +- 
.../http/retry/RetryingHttpSender.java | 16 +- .../huggingface/HuggingFaceClient.java | 48 -- .../HuggingFaceResponseHandler.java | 63 ++ .../external/openai/OpenAiClient.java | 3 +- .../openai/OpenAiResponseHandler.java | 64 ++ ....java => HuggingFaceInferenceRequest.java} | 6 +- ...=> HuggingFaceInferenceRequestEntity.java} | 4 +- .../external/response/XContentUtils.java | 46 ++ .../HuggingFaceElserResponseEntity.java | 6 +- .../HuggingFaceEmbeddingsResponseEntity.java | 161 +++++ .../HuggingFaceErrorResponseEntity.java | 52 ++ .../OpenAiEmbeddingsResponseEntity.java | 30 +- .../openai/OpenAiErrorResponseEntity.java | 6 +- .../inference/registry/ModelRegistry.java | 8 +- .../inference/services/SenderService.java | 77 +++ ...MapParsingUtils.java => ServiceUtils.java} | 32 +- .../services/elser/ElserMlNodeService.java | 4 +- .../elser/ElserMlNodeServiceSettings.java | 14 +- .../huggingface/HuggingFaceBaseService.java | 106 ++++ .../huggingface/HuggingFaceModel.java | 29 + .../huggingface/HuggingFaceService.java | 55 ++ .../HuggingFaceServiceSettings.java | 89 +++ .../elser/HuggingFaceElserModel.java | 44 +- .../elser/HuggingFaceElserSecretSettings.java | 9 +- .../elser/HuggingFaceElserService.java | 115 +--- .../HuggingFaceElserServiceSettings.java | 22 +- .../HuggingFaceEmbeddingsModel.java | 70 +++ .../openai/OpenAiResponseHandler.java | 127 ---- .../services/openai/OpenAiService.java | 93 +-- .../openai/OpenAiServiceSettings.java | 6 +- .../embeddings/OpenAiEmbeddingsModel.java | 5 +- .../OpenAiEmbeddingsRequestTaskSettings.java | 2 +- .../OpenAiEmbeddingsTaskSettings.java | 4 +- .../settings/DefaultSecretSettings.java | 9 +- .../HuggingFaceActionCreatorTests.java} | 151 ++++- .../huggingface/HuggingFaceActionTests.java | 114 ++++ .../HuggingFaceElserActionTests.java | 189 ------ .../http/retry/BaseResponseHandlerTests.java | 32 + .../HuggingFaceResponseHandlerTests.java | 93 +++ .../openai/OpenAiResponseHandlerTests.java | 84 +++ 
.../HuggingFaceElserRequestEntityTests.java | 2 +- .../HuggingFaceElserRequestTests.java | 6 +- ...gingFaceEmbeddingsResponseEntityTests.java | 339 ++++++++++ .../HuggingFaceErrorResponseEntityTests.java | 62 ++ .../OpenAiEmbeddingsResponseEntityTests.java | 33 - .../OpenAiErrorResponseEntityTests.java | 2 +- .../xpack/inference/model/TestModel.java | 14 +- .../services/SenderServiceTests.java | 143 +++++ ...UtilsTests.java => ServiceUtilsTests.java} | 32 +- .../xpack/inference/services/Utils.java | 27 + .../HuggingFaceBaseServiceTests.java | 111 ++++ .../HuggingFaceServiceSettingsTests.java | 107 ++++ .../huggingface/HuggingFaceServiceTests.java | 589 ++++++++++++++++++ .../elser/HuggingFaceElserModelTests.java | 32 + .../HuggingFaceElserSecretSettingsTests.java | 4 + .../HuggingFaceEmbeddingsModelTests.java | 34 + .../openai/OpenAiResponseHandlerTests.java | 63 -- .../openai/OpenAiServiceSettingsTests.java | 1 - .../services/openai/OpenAiServiceTests.java | 305 +++++---- .../settings/DefaultSecretSettingsTests.java | 4 + 72 files changed, 3369 insertions(+), 982 deletions(-) delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/{HuggingFaceElserRequest.java => HuggingFaceInferenceRequest.java} (86%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/{HuggingFaceElserRequestEntity.java => HuggingFaceInferenceRequestEntity.java} (85%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/{MapParsingUtils.java => ServiceUtils.java} (79%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/{huggingface/HuggingFaceClientTests.java => action/huggingface/HuggingFaceActionCreatorTests.java} (50%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java rename 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/{MapParsingUtilsTests.java => ServiceUtilsTests.java} (88%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index b6e204f3839f7..c392d3b6b4e29 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -183,6 +183,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_SERVICE_RESULTS_ADDED = def(8_550_00_0); public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); + public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 87f623bdfe5cc..3879a0a344e06 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -22,6 +22,8 @@ exports org.elasticsearch.xpack.inference.registry; exports org.elasticsearch.xpack.inference.rest; exports org.elasticsearch.xpack.inference.services; + exports org.elasticsearch.xpack.inference.external.http.sender; + exports org.elasticsearch.xpack.inference.external.http; exports org.elasticsearch.xpack.inference.services.elser; exports org.elasticsearch.xpack.inference.services.huggingface.elser; exports org.elasticsearch.xpack.inference.services.openai; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 092b1200fb80a..c632c568fea16 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; import 
org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; @@ -32,6 +33,7 @@ public class InferenceNamedWriteablesProvider { private InferenceNamedWriteablesProvider() {} + @SuppressWarnings("deprecation") public static List getNamedWriteables() { List namedWriteables = new ArrayList<>(); @@ -62,7 +64,7 @@ public static List getNamedWriteables() { new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) ); - // Hugging Face ELSER config + // Hugging Face config namedWriteables.add( new NamedWriteableRegistry.Entry( ServiceSettings.class, @@ -70,6 +72,9 @@ public static List getNamedWriteables() { HuggingFaceElserServiceSettings::new ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry(ServiceSettings.class, HuggingFaceServiceSettings.NAME, HuggingFaceServiceSettings::new) + ); namedWriteables.add( new NamedWriteableRegistry.Entry(SecretSettings.class, HuggingFaceElserSecretSettings.NAME, HuggingFaceElserSecretSettings::new) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 7e7f2c9e05680..3adc63c9863cb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; @@ -147,6 +148,7 @@ public List 
getInferenceServiceFactories() { return List.of( ElserMlNodeService::new, context -> new HuggingFaceElserService(httpFactory, serviceComponents), + context -> new HuggingFaceService(httpFactory, serviceComponents), context -> new OpenAiService(httpFactory, serviceComponents) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java deleted file mode 100644 index 03e0f4d8a4543..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference; - -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.rest.RestStatus; - -import java.util.Map; - -public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings, Map secrets) { - - public static UnparsedModel unparsedModelFromMap(Map configMap, Map secretsMap) { - String modelId = removeStringOrThrowIfNull(configMap, ModelConfigurations.MODEL_ID); - String service = removeStringOrThrowIfNull(configMap, ModelConfigurations.SERVICE); - String taskTypeStr = removeStringOrThrowIfNull(configMap, TaskType.NAME); - TaskType taskType = TaskType.fromString(taskTypeStr); - - return new UnparsedModel(modelId, taskType, service, configMap, secretsMap); - } - - private static String removeStringOrThrowIfNull(Map sourceMap, String fieldName) { - String value = (String) sourceMap.remove(fieldName); - if (value == null) { - throw new 
ElasticsearchStatusException("Missing required field [{}]", RestStatus.BAD_REQUEST, fieldName); - } - return value; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java new file mode 100644 index 0000000000000..2cf9168f60986 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; +import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequest; +import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequestEntity; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; + +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class HuggingFaceAction implements ExecutableAction { + private static final Logger logger = LogManager.getLogger(HuggingFaceAction.class); + + private final HuggingFaceAccount account; + private final String errorMessage; + private final RetryingHttpSender sender; + private final ResponseHandler responseHandler; + + public HuggingFaceAction( + Sender sender, + HuggingFaceModel model, + ServiceComponents serviceComponents, + ResponseHandler responseHandler, + String requestType + ) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + Objects.requireNonNull(requestType); + + this.responseHandler = Objects.requireNonNull(responseHandler); + + this.sender = new RetryingHttpSender( + Objects.requireNonNull(sender), + serviceComponents.throttlerManager(), + logger, + new RetrySettings(serviceComponents.settings()), + serviceComponents.threadPool() + ); + this.account = new HuggingFaceAccount(model.getUri(), model.getApiKey()); + this.errorMessage = format("Failed to send Hugging Face %s request to [%s]", requestType, model.getUri().toString()); + } + + @Override + public void execute(List input, ActionListener listener) { + try { + HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(account, new HuggingFaceInferenceRequestEntity(input)); + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(request.createRequest(), responseHandler, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + 
listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java new file mode 100644 index 0000000000000..ba46519814b04 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreator.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceResponseHandler; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceEmbeddingsResponseEntity; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +import java.util.Objects; + +/** + * Provides a way to construct an {@link ExecutableAction} using the visitor pattern based on the hugging face model type. 
+ */ +public class HuggingFaceActionCreator implements HuggingFaceActionVisitor { + private final Sender sender; + private final ServiceComponents serviceComponents; + + public HuggingFaceActionCreator(Sender sender, ServiceComponents serviceComponents) { + this.sender = Objects.requireNonNull(sender); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + } + + @Override + public ExecutableAction create(HuggingFaceEmbeddingsModel model) { + var responseHandler = new HuggingFaceResponseHandler( + "hugging face text embeddings", + HuggingFaceEmbeddingsResponseEntity::fromResponse + ); + + return new HuggingFaceAction(sender, model, serviceComponents, responseHandler, "text embeddings"); + } + + @Override + public ExecutableAction create(HuggingFaceElserModel model) { + var responseHandler = new HuggingFaceResponseHandler("hugging face elser", HuggingFaceElserResponseEntity::fromResponse); + + return new HuggingFaceAction(sender, model, serviceComponents, responseHandler, "ELSER"); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java new file mode 100644 index 0000000000000..070be8db50ff0 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionVisitor.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +public interface HuggingFaceActionVisitor { + ExecutableAction create(HuggingFaceEmbeddingsModel mode); + + ExecutableAction create(HuggingFaceElserModel mode); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java deleted file mode 100644 index fb648e2aabcfd..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserAction.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.action.huggingface; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.action.ExecutableAction; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; -import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceClient; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestEntity; -import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; - -import java.util.List; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; -import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; - -public class HuggingFaceElserAction implements ExecutableAction { - - private final HuggingFaceAccount account; - private final HuggingFaceClient client; - private final String errorMessage; - - public HuggingFaceElserAction(Sender sender, HuggingFaceElserModel model, ServiceComponents serviceComponents) { - this.client = new HuggingFaceClient(sender, serviceComponents); - this.account = new HuggingFaceAccount(model.getServiceSettings().uri(), model.getSecretSettings().apiKey()); - this.errorMessage = format("Failed to send ELSER Hugging Face request to [%s]", model.getServiceSettings().uri().toString()); - } - - @Override - public void execute(List input, ActionListener listener) { - try { - HuggingFaceElserRequest request = new HuggingFaceElserRequest(account, new 
HuggingFaceElserRequestEntity(input)); - ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - - client.send(request, wrappedListener); - } catch (ElasticsearchException e) { - listener.onFailure(e); - } catch (Exception e) { - listener.onFailure(createInternalServerError(e, errorMessage)); - } - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java new file mode 100644 index 0000000000000..31d987118c28d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.core.Strings.format; + +public abstract class BaseResponseHandler implements ResponseHandler { + + public static final String SERVER_ERROR = "Received a server error status code"; + public static final String RATE_LIMIT = "Received a rate limit status code"; + public static final String AUTHENTICATION = "Received an authentication error status code"; + public static final String REDIRECTION = "Unhandled redirection"; + public static final String UNSUCCESSFUL = "Received an unsuccessful status code"; + + protected final String requestType; + private final CheckedFunction parseFunction; + private final Function errorParseFunction; + + public BaseResponseHandler( + String requestType, + CheckedFunction parseFunction, + Function errorParseFunction + ) { + this.requestType = Objects.requireNonNull(requestType); + this.parseFunction = Objects.requireNonNull(parseFunction); + this.errorParseFunction = Objects.requireNonNull(errorParseFunction); + } + + @Override + public InferenceServiceResults parseResult(HttpResult result) throws RetryException { + try { + return parseFunction.apply(result); + } catch (Exception e) { + throw new RetryException(true, e); + } + } + + @Override + public String getRequestType() { + return requestType; + } + + protected Exception buildError(String message, HttpRequestBase request, HttpResult result) { + var errorEntityMsg = errorParseFunction.apply(result); + var responseStatusCode = 
result.response().getStatusLine().getStatusCode(); + + if (errorEntityMsg == null) { + return new ElasticsearchStatusException( + format("%s for request [%s] status [%s]", message, request.getRequestLine(), responseStatusCode), + toRestStatus(responseStatusCode) + ); + } + + return new ElasticsearchStatusException( + format( + "%s for request [%s] status [%s]. Error message: [%s]", + message, + request.getRequestLine(), + responseStatusCode, + errorEntityMsg.getErrorMessage() + ), + toRestStatus(responseStatusCode) + ); + } + + static RestStatus toRestStatus(int statusCode) { + RestStatus code = null; + if (statusCode < 500) { + code = RestStatus.fromCode(statusCode); + } + + return code == null ? RestStatus.BAD_REQUEST : code; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java new file mode 100644 index 0000000000000..a4be7f15827fb --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ErrorMessage.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +public interface ErrorMessage { + String getErrorMessage(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java index 3fe8225927f06..b4598717e7fc8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java @@ -18,7 +18,11 @@ public RetryException(boolean shouldRetry, Throwable cause) { this.shouldRetry = shouldRetry; } - public RetryException(boolean shouldRetry, String msg) { + /** + * This should really only be used for testing. Ideally a retry exception would be associated with + * an actual exception that can be provided back to the client in the event that retrying fails. 
+ */ + RetryException(boolean shouldRetry, String msg) { super(msg); this.shouldRetry = shouldRetry; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java index e91349cfbc2b7..70f2a9e0dde16 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java @@ -9,16 +9,19 @@ import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RetryableAction; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.IOException; +import java.net.UnknownHostException; import java.util.Objects; import java.util.concurrent.Executor; @@ -109,13 +112,22 @@ public boolean shouldRetry(Exception e) { /** * If the connection gets closed by the server or because of the connections time to live is exceeded we'll likely get a - * {@link org.apache.http.ConnectionClosedException} exception which is a child of IOException. For now, - * we'll consider all IOExceptions retryable because something failed while we were trying to send the request + * {@link org.apache.http.ConnectionClosedException} exception which is a child of IOException. 
+ * * @param e the Exception received while sending the request * @return a {@link RetryException} if this exception can be retried */ private Exception transformIfRetryable(Exception e) { var exceptionToReturn = e; + + if (e instanceof UnknownHostException) { + return new ElasticsearchStatusException( + format("Invalid host [%s], please check that the URL is correct.", request.getURI()), + RestStatus.BAD_REQUEST, + e + ); + } + if (e instanceof IOException) { exceptionToReturn = new RetryException(true, e); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java deleted file mode 100644 index f24a5529a4663..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClient.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.external.huggingface; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; -import org.elasticsearch.xpack.inference.external.http.retry.RetryingHttpSender; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequest; -import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceElserResponseEntity; -import org.elasticsearch.xpack.inference.services.ServiceComponents; - -import java.io.IOException; - -public class HuggingFaceClient { - private static final Logger logger = LogManager.getLogger(HuggingFaceClient.class); - private static final ResponseHandler ELSER_RESPONSE_HANDLER = createElserHandler(); - - private final RetryingHttpSender sender; - - public HuggingFaceClient(Sender sender, ServiceComponents serviceComponents) { - this.sender = new RetryingHttpSender( - sender, - serviceComponents.throttlerManager(), - logger, - new RetrySettings(serviceComponents.settings()), - serviceComponents.threadPool() - ); - } - - public void send(HuggingFaceElserRequest request, ActionListener listener) throws IOException { - this.sender.send(request.createRequest(), ELSER_RESPONSE_HANDLER, listener); - } - - private static ResponseHandler createElserHandler() { - return new AlwaysRetryingResponseHandler("elser hugging face", HuggingFaceElserResponseEntity::fromResponse); - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java new file mode 100644 index 0000000000000..eb7bc3d6a0b28 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceErrorResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; + +public class HuggingFaceResponseHandler extends BaseResponseHandler { + + public HuggingFaceResponseHandler(String requestType, CheckedFunction parseFunction) { + super(requestType, parseFunction, HuggingFaceErrorResponseEntity::fromResponse); + } + + @Override + public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException { + checkForFailureStatusCode(request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } + + /** + * Validates the status code and throws a 
RetryException if it is not in the range [200, 300). + * + * The Hugging Face error codes are loosely defined here. + * @param request the http request + * @param result the http response and body + * @throws RetryException thrown if status code is {@code >= 300 or < 200} + */ + void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + int statusCode = result.response().getStatusLine().getStatusCode(); + if (statusCode >= 200 && statusCode < 300) { + return; + } + + if (statusCode == 503 || statusCode == 502 || statusCode == 429) { + throw new RetryException(true, buildError(RATE_LIMIT, request, result)); + } else if (statusCode >= 500) { + throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 401) { + throw new RetryException(false, buildError(AUTHENTICATION, request, result)); + } else if (statusCode >= 300 && statusCode < 400) { + throw new RetryException(false, buildError(REDIRECTION, request, result)); + } else { + throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java index af809f1be97f9..e31bc3b2fd41e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiEmbeddingsResponseEntity; import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.openai.OpenAiResponseHandler; import java.io.IOException; @@ -43,6 
+42,6 @@ public void send(OpenAiEmbeddingsRequest request, ActionListener OpenAiEmbeddingsResponseEntity.fromResponse(result)); + return new OpenAiResponseHandler("openai text embedding", OpenAiEmbeddingsResponseEntity::fromResponse); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java new file mode 100644 index 0000000000000..7609b734db4f5 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.io.IOException; + +import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; + +public class OpenAiResponseHandler extends BaseResponseHandler { + + public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { + super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); + } + + @Override + public 
void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) + throws RetryException { + checkForFailureStatusCode(request, result); + checkForEmptyBody(throttlerManager, logger, request, result); + } + + /** + * Validates the status code throws an RetryException if not in the range [200, 300). + * + * The OpenAI API error codes are documented here. + * @param request The http request + * @param result The http response and body + * @throws RetryException Throws if status code is {@code >= 300 or < 200 } + */ + void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { + int statusCode = result.response().getStatusLine().getStatusCode(); + if (statusCode >= 200 && statusCode < 300) { + return; + } + + // handle error codes + if (statusCode >= 500) { + throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 429) { + throw new RetryException(false, buildError(RATE_LIMIT, request, result)); // TODO back off and retry + } else if (statusCode == 401) { + throw new RetryException(false, buildError(AUTHENTICATION, request, result)); + } else if (statusCode >= 300 && statusCode < 400) { + throw new RetryException(false, buildError(REDIRECTION, request, result)); + } else { + throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java similarity index 86% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java rename to 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java index 563b0036bdb09..8b37439fc6c8a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java @@ -21,12 +21,12 @@ import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -public class HuggingFaceElserRequest implements Request { +public class HuggingFaceInferenceRequest implements Request { private final HuggingFaceAccount account; - private final HuggingFaceElserRequestEntity entity; + private final HuggingFaceInferenceRequestEntity entity; - public HuggingFaceElserRequest(HuggingFaceAccount account, HuggingFaceElserRequestEntity entity) { + public HuggingFaceInferenceRequest(HuggingFaceAccount account, HuggingFaceInferenceRequestEntity entity) { this.account = Objects.requireNonNull(account); this.entity = Objects.requireNonNull(entity); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java similarity index 85% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java index 10ba249f9da7d..8656d3271a52e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntity.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntity.java @@ -14,11 +14,11 @@ import java.util.List; import java.util.Objects; -public record HuggingFaceElserRequestEntity(List inputs) implements ToXContentObject { +public record HuggingFaceInferenceRequestEntity(List inputs) implements ToXContentObject { private static final String INPUTS_FIELD = "inputs"; - public HuggingFaceElserRequestEntity { + public HuggingFaceInferenceRequestEntity { Objects.requireNonNull(inputs); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java new file mode 100644 index 0000000000000..4f4091873fba9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response; + +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.core.Strings.format; + +public class XContentUtils { + + public static void moveToFirstToken(XContentParser parser) throws IOException { + if (parser.currentToken() == null) { + parser.nextToken(); + } + } + + /** + * Iterates over the tokens until it finds a field name token with the text matching the field requested. 
+ * + * @param parser parser to move + * @param field the field name to find + * @param errorMsgTemplate a template message to populate an exception if the field cannot be found + * @throws IllegalStateException if the field cannot be found + */ + public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { + XContentParser.Token token; + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { + parser.nextToken(); + return; + } + } + + throw new IllegalStateException(format(errorMsgTemplate, field)); + } + + private XContentUtils() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index fab22dce889a5..7ef0d1cdbf3c7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java @@ -21,6 +21,8 @@ import java.util.Collections; import java.util.List; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; + public class HuggingFaceElserResponseEntity { /** @@ -58,9 +60,7 @@ public static SparseEmbeddingResults fromResponse(HttpResult response) throws IO var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { - if (jsonParser.currentToken() == null) { - jsonParser.nextToken(); - } + 
moveToFirstToken(jsonParser); List parsedEmbeddings = XContentParserUtils.parseList( jsonParser, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java new file mode 100644 index 0000000000000..fb7cbf5d49768 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java @@ -0,0 +1,161 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class HuggingFaceEmbeddingsResponseEntity { + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Hugging Face 
embeddings response"; + + /** + * Parse the response from hugging face. The known formats are an array of arrays and object with an {@code embeddings} field containing + * an array of arrays. + */ + public static TextEmbeddingResults fromResponse(HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + return parseArrayFormat(jsonParser); + } else if (token == XContentParser.Token.START_OBJECT) { + return parseObjectFormat(jsonParser); + } else { + throwUnknownToken(token, jsonParser); + } + } + + // This should never be reached. The above code should either return successfully or hit the throwUnknownToken + // or throw a parsing exception + throw new IllegalStateException("Reached an invalid state while parsing the hugging face response"); + } + + /** + * The response from hugging face could be formatted as [[0.1, ...], [0.1, ...]]. + * Each entry in the array will correspond to the entry within the inputs array within the request sent to hugging face. For example + * for a request like: + * + *
    +     *     
    +     *         {
    +     *             "inputs": ["hello this is my name", "I wish I was there!"]
    +     *         }
    +     *     
    +     * 
    + * + * The response would look like: + * + *
    +     *     
    +     *         [
    +     *              [
    +     *                  0.1,
    +     *                  0.234
    +     *              ],
    +     *              [
    +     *                  0.34,
    +     *                  0.56
    +     *              ]
    +     *         ]
    +     *     
    +     * 
    + * + * Example models with this response format: + * intfloat/e5-small-v2 + * intfloat/e5-base-v2 + * intfloat/multilingual-e5-base + * sentence-transformers/all-MiniLM-L6-v2 + * sentence-transformers/all-MiniLM-L12-v2 + */ + private static TextEmbeddingResults parseArrayFormat(XContentParser parser) throws IOException { + List embeddingList = XContentParserUtils.parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); + + return new TextEmbeddingResults(embeddingList); + } + + /** + * The response from hugging face could be formatted as {"embeddings": [[0.1, ...], [0.1, ...]}. + * Each entry in the array will correspond to the entry within the inputs array within the request sent to hugging face. For example + * for a request like: + * + *
    +     *     
    +     *         {
    +     *             "inputs": ["hello this is my name", "I wish I was there!"]
    +     *         }
    +     *     
    +     * 
    + * + * The response would look like: + * + *
    +     *     
    +     *         {
    +     *             "embeddings": [
    +     *                  [
    +     *                      0.1,
    +     *                      0.234
    +     *                  ],
    +     *                  [
    +     *                      0.34,
    +     *                      0.56
    +     *                  ]
    +     *             ]
    +     *         }
    +     *     
    +     * 
    + * + * Example models with this response format: + * intfloat/multilingual-e5-small + * sentence-transformers/all-mpnet-base-v2 + */ + private static TextEmbeddingResults parseObjectFormat(XContentParser parser) throws IOException { + positionParserAtTokenAfterField(parser, "embeddings", FAILED_TO_FIND_FIELD_TEMPLATE); + + List embeddingList = XContentParserUtils.parseList( + parser, + HuggingFaceEmbeddingsResponseEntity::parseEmbeddingEntry + ); + + return new TextEmbeddingResults(embeddingList); + } + + private static TextEmbeddingResults.Embedding parseEmbeddingEntry(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + + List embeddingValues = XContentParserUtils.parseList(parser, HuggingFaceEmbeddingsResponseEntity::parseEmbeddingList); + return new TextEmbeddingResults.Embedding(embeddingValues); + } + + private static float parseEmbeddingList(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser); + return parser.floatValue(); + } + + private HuggingFaceEmbeddingsResponseEntity() {} +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java new file mode 100644 index 0000000000000..faeb7c6ac4fa9 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntity.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorMessage; + +public record HuggingFaceErrorResponseEntity(String message) implements ErrorMessage { + /** + * An example error response for invalid auth would look like + * + * { + * "error": "A valid user token is required" + * } + * + * + * + * @param response The error response + * @return An error entity if the response is JSON with the above structure + * or null if the response does not contain the error field + */ + public static HuggingFaceErrorResponseEntity fromResponse(HttpResult response) { + try ( + XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, response.body()) + ) { + var responseMap = jsonParser.map(); + var error = (String) responseMap.get("error"); + if (error != null) { + return new HuggingFaceErrorResponseEntity(error); + } + } catch (Exception e) { + // swallow the error + } + + return null; + } + + @Override + public String getErrorMessage() { + return message; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java index c301ab2194415..b723cb0f86dea 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java 
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java @@ -19,9 +19,11 @@ import java.io.IOException; import java.util.List; -import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; public class OpenAiEmbeddingsResponseEntity { + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in OpenAI embeddings response"; /** * Parses the OpenAI json response. @@ -70,14 +72,12 @@ public static TextEmbeddingResults fromResponse(HttpResult response) throws IOEx var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { - if (jsonParser.currentToken() == null) { - jsonParser.nextToken(); - } + moveToFirstToken(jsonParser); XContentParser.Token token = jsonParser.currentToken(); XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); - positionParserAtTokenAfterField(jsonParser, "data"); + positionParserAtTokenAfterField(jsonParser, "data", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingList = XContentParserUtils.parseList( jsonParser, @@ -88,28 +88,10 @@ public static TextEmbeddingResults fromResponse(HttpResult response) throws IOEx } } - /** - * Iterates over the tokens until it finds a field name token with the text matching the field requested. 
- * - * @throws IllegalStateException if the field cannot be found - */ - private static void positionParserAtTokenAfterField(XContentParser parser, String field) throws IOException { - XContentParser.Token token; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { - parser.nextToken(); - return; - } - } - - throw new IllegalStateException(format("Failed to find required field [%s] in OpenAI embeddings response", field)); - } - private static TextEmbeddingResults.Embedding parseEmbeddingObject(XContentParser parser) throws IOException { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - positionParserAtTokenAfterField(parser, "embedding"); + positionParserAtTokenAfterField(parser, "embedding", FAILED_TO_FIND_FIELD_TEMPLATE); List embeddingValues = XContentParserUtils.parseList(parser, OpenAiEmbeddingsResponseEntity::parseEmbeddingList); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java index 10f42a8ec7d19..a364be29ada33 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntity.java @@ -12,11 +12,11 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ErrorMessage; -import java.io.IOException; import java.util.Map; -public class OpenAiErrorResponseEntity { +public class 
OpenAiErrorResponseEntity implements ErrorMessage { private final String errorMessage; @@ -60,7 +60,7 @@ public static OpenAiErrorResponseEntity fromResponse(HttpResult response) { return new OpenAiErrorResponseEntity(message); } } - } catch (IOException e) { + } catch (Exception e) { // swallow the error } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java index 05c664f7ceeea..aa2e0a81a59b2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java @@ -42,7 +42,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.inference.InferenceIndex; import org.elasticsearch.xpack.inference.InferenceSecretsIndex; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.ArrayList; @@ -73,9 +73,9 @@ public static UnparsedModel unparsedModelFromMap(ModelConfigMap modelConfigMap) if (modelConfigMap.config() == null) { throw new ElasticsearchStatusException("Missing config map", RestStatus.BAD_REQUEST); } - String modelId = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); - String service = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); - String taskTypeStr = MapParsingUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); + String modelId = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.MODEL_ID); + String service = ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), ModelConfigurations.SERVICE); + String taskTypeStr = 
ServiceUtils.removeStringOrThrowIfNull(modelConfigMap.config(), TaskType.NAME); TaskType taskType = TaskType.fromString(taskTypeStr); return new UnparsedModel(modelId, taskType, service, modelConfigMap.config(), modelConfigMap.secrets()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java new file mode 100644 index 0000000000000..bb45e8fd684a6 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/SenderService.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +public abstract class SenderService implements InferenceService { + private final SetOnce factory; + private final SetOnce serviceComponents; + private final AtomicReference sender = new AtomicReference<>(); + + public SenderService(SetOnce factory, SetOnce serviceComponents) { + this.factory = Objects.requireNonNull(factory); + this.serviceComponents = Objects.requireNonNull(serviceComponents); + } + + protected Sender 
getSender() { + return sender.get(); + } + + protected ServiceComponents getServiceComponents() { + return serviceComponents.get(); + } + + @Override + public void infer(Model model, List input, Map taskSettings, ActionListener listener) { + init(); + + doInfer(model, input, taskSettings, listener); + } + + protected abstract void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ); + + @Override + public void start(Model model, ActionListener listener) { + init(); + + doStart(model, listener); + } + + protected void doStart(Model model, ActionListener listener) { + listener.onResponse(true); + } + + private void init() { + sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); + sender.get().start(); + } + + @Override + public void close() throws IOException { + IOUtils.closeWhileHandlingException(sender.get()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java similarity index 79% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 45bbddc92f135..597cd172ff661 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/MapParsingUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.Model; import org.elasticsearch.rest.RestStatus; import java.net.URI; @@ -20,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class 
MapParsingUtils { +public class ServiceUtils { /** * Remove the object from the map and cast to the expected type. * If the object cannot be cast to type an ElasticsearchStatusException @@ -71,7 +72,7 @@ public static String removeStringOrThrowIfNull(Map sourceMap, St public static void throwIfNotEmptyMap(Map settingsMap, String serviceName) { if (settingsMap != null && settingsMap.isEmpty() == false) { - throw MapParsingUtils.unknownSettingsError(settingsMap, serviceName); + throw ServiceUtils.unknownSettingsError(settingsMap, serviceName); } } @@ -102,7 +103,7 @@ public static URI convertToUri(String url, String settingName, String settingSco try { return createUri(url); } catch (IllegalArgumentException ignored) { - validationException.addValidationError(MapParsingUtils.invalidUrlErrorMsg(url, settingName, settingScope)); + validationException.addValidationError(ServiceUtils.invalidUrlErrorMsg(url, settingName, settingScope)); return null; } } @@ -138,12 +139,12 @@ public static String extractRequiredString( String scope, ValidationException validationException ) { - String requiredField = MapParsingUtils.removeAsType(map, settingName, String.class); + String requiredField = ServiceUtils.removeAsType(map, settingName, String.class); if (requiredField == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(settingName, scope)); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(settingName, scope)); } else if (requiredField.isEmpty()) { - validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(settingName, scope)); + validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } if (validationException.validationErrors().isEmpty() == false) { @@ -159,10 +160,10 @@ public static String extractOptionalString( String scope, ValidationException validationException ) { - String optionalField = MapParsingUtils.removeAsType(map, settingName, String.class); 
+ String optionalField = ServiceUtils.removeAsType(map, settingName, String.class); if (optionalField != null && optionalField.isEmpty()) { - validationException.addValidationError(MapParsingUtils.mustBeNonEmptyString(settingName, scope)); + validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } if (validationException.validationErrors().isEmpty() == false) { @@ -171,4 +172,19 @@ public static String extractOptionalString( return optionalField; } + + public static String parsePersistedConfigErrorMsg(String modelId, String serviceName) { + return format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, serviceName); + } + + public static ElasticsearchStatusException createInvalidModelException(Model model) { + return new ElasticsearchStatusException( + format( + "The internal model was invalid, please delete the service [%s] with id [%s] and add it again.", + model.getConfigurations().getService(), + model.getConfigurations().getModelId() + ), + RestStatus.INTERNAL_SERVER_ERROR + ); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java index 048920356aca0..7becc57999fb6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java @@ -32,8 +32,8 @@ import java.util.Set; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; +import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; public class ElserMlNodeService implements InferenceService { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java index d1f27302f85f1..2ea7b080d059d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java @@ -15,7 +15,7 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; @@ -43,26 +43,24 @@ public class ElserMlNodeServiceSettings implements ServiceSettings { */ public static ElserMlNodeServiceSettings.Builder fromMap(Map map) { ValidationException validationException = new ValidationException(); - Integer numAllocations = MapParsingUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = MapParsingUtils.removeAsType(map, NUM_THREADS, Integer.class); + Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); + Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); if (numAllocations == null) { validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) + ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) ); } else if 
(numAllocations < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations)); } if (numThreads == null) { - validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS) - ); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)); } else if (numThreads < 1) { validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads)); } - String version = MapParsingUtils.removeAsType(map, MODEL_VERSION, String.class); + String version = ServiceUtils.removeAsType(map, MODEL_VERSION, String.class); if (version != null && ElserMlNodeService.VALID_ELSER_MODELS.contains(version) == false) { validationException.addValidationError("unknown ELSER model version [" + version + "]"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java new file mode 100644 index 0000000000000..a7dc26b8472d1 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionCreator; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.services.SenderService; +import org.elasticsearch.xpack.inference.services.ServiceComponents; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; + +public abstract class HuggingFaceBaseService extends SenderService { + + public HuggingFaceBaseService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + public HuggingFaceModel parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platformArchitectures + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + var model = createModel( + modelId, + taskType, + serviceSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, name()) + ); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + return model; + } + + @Override + public HuggingFaceModel parsePersistedConfigWithSecrets( + String 
modelId, + TaskType taskType, + Map config, + Map secrets + ) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + + return createModel(modelId, taskType, serviceSettingsMap, secretSettingsMap, parsePersistedConfigErrorMsg(modelId, name())); + } + + @Override + public HuggingFaceModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + return createModel(modelId, taskType, serviceSettingsMap, null, parsePersistedConfigErrorMsg(modelId, name())); + } + + protected abstract HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + Map secretSettings, + String failureMessage + ); + + @Override + public void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + if (model instanceof HuggingFaceModel == false) { + listener.onFailure(createInvalidModelException(model)); + return; + } + + var huggingFaceModel = (HuggingFaceModel) model; + var actionCreator = new HuggingFaceActionCreator(getSender(), getServiceComponents()); + + var action = huggingFaceModel.accept(actionCreator); + action.execute(input, listener); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java new file mode 100644 index 0000000000000..d672afa99ea9d --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; + +import java.net.URI; + +public abstract class HuggingFaceModel extends Model { + public HuggingFaceModel(ModelConfigurations configurations, ModelSecrets secrets) { + super(configurations, secrets); + } + + public abstract ExecutableAction accept(HuggingFaceActionVisitor creator); + + public abstract URI getUri(); + + public abstract SecureString getApiKey(); +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java new file mode 100644 index 0000000000000..99e39f6f55912 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; + +import java.util.Map; + +public class HuggingFaceService extends HuggingFaceBaseService { + public static final String NAME = "hugging_face"; + + public HuggingFaceService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + protected HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + @Nullable Map secretSettings, + String failureMessage + ) { + return switch (taskType) { + case TEXT_EMBEDDING -> new HuggingFaceEmbeddingsModel(modelId, taskType, NAME, serviceSettings, secretSettings); + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + }; + } + + @Override + public String name() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_HF_SERVICE_ADDED; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java new file mode 100644 index 0000000000000..dc98990b1ef8c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; + +public record HuggingFaceServiceSettings(URI uri) implements ServiceSettings { + public static final String NAME = "hugging_face_service_settings"; + + static final String URL = "url"; + + public static HuggingFaceServiceSettings fromMap(Map map) { + return new HuggingFaceServiceSettings(extractUri(map, URL)); + } + + public static URI extractUri(Map map, String fieldName) { + ValidationException validationException = new ValidationException(); + + String parsedUrl = extractRequiredString(map, fieldName, 
ModelConfigurations.SERVICE_SETTINGS, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + URI uri = convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return uri; + } + + public HuggingFaceServiceSettings { + Objects.requireNonNull(uri); + } + + public HuggingFaceServiceSettings(String url) { + this(createUri(url)); + } + + public HuggingFaceServiceSettings(StreamInput in) throws IOException { + this(in.readString()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(URL, uri.toString()); + builder.endObject(); + + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_HF_SERVICE_ADDED; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uri.toString()); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 6dc8ec280dc9d..24160387179ff 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -7,18 +7,41 @@ package org.elasticsearch.xpack.inference.services.huggingface.elser; -import org.elasticsearch.inference.Model; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import 
org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -public class HuggingFaceElserModel extends Model { +import java.net.URI; +import java.util.Map; + +public class HuggingFaceElserModel extends HuggingFaceModel { public HuggingFaceElserModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + @Nullable Map secrets + ) { + this( + modelId, + taskType, + service, + HuggingFaceElserServiceSettings.fromMap(serviceSettings), + HuggingFaceElserSecretSettings.fromMap(secrets) + ); + } + + HuggingFaceElserModel( String modelId, TaskType taskType, String service, HuggingFaceElserServiceSettings serviceSettings, - HuggingFaceElserSecretSettings secretSettings + @Nullable HuggingFaceElserSecretSettings secretSettings ) { super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secretSettings)); } @@ -32,4 +55,19 @@ public HuggingFaceElserServiceSettings getServiceSettings() { public HuggingFaceElserSecretSettings getSecretSettings() { return (HuggingFaceElserSecretSettings) super.getSecretSettings(); } + + @Override + public ExecutableAction accept(HuggingFaceActionVisitor creator) { + return creator.create(this); + } + + @Override + public URI getUri() { + return getServiceSettings().uri(); + } + + @Override + public SecureString getApiKey() { + return getSecretSettings().apiKey(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java 
index f2df48366f786..bf024e97f1e0a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,14 +22,18 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; public record HuggingFaceElserSecretSettings(SecureString apiKey) implements SecretSettings { public static final String NAME = "hugging_face_elser_secret_settings"; static final String API_KEY = "api_key"; - public static HuggingFaceElserSecretSettings fromMap(Map map) { + public static HuggingFaceElserSecretSettings fromMap(@Nullable Map map) { + if (map == null) { + return null; + } + ValidationException validationException = new ValidationException(); SecureString secureApiToken = extractRequiredSecureString(map, API_KEY, ModelSecrets.SECRET_SETTINGS, validationException); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 8c978112c4ec3..c06b6a62db29a 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -8,44 +8,24 @@ package org.elasticsearch.xpack.inference.services.huggingface.elser; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.inference.InferenceService; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceElserAction; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceBaseService; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; -import java.io.IOException; -import java.util.List; import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; - -public class HuggingFaceElserService implements InferenceService { 
+public class HuggingFaceElserService extends HuggingFaceBaseService { public static final String NAME = "hugging_face_elser"; - private final SetOnce factory; - private final SetOnce serviceComponents; - private final AtomicReference sender = new AtomicReference<>(); - public HuggingFaceElserService(SetOnce factory, SetOnce serviceComponents) { - this.factory = Objects.requireNonNull(factory); - this.serviceComponents = Objects.requireNonNull(serviceComponents); + super(factory, serviceComponents); } @Override @@ -54,86 +34,17 @@ public String name() { } @Override - public HuggingFaceElserModel parseRequestConfig( - String modelId, - TaskType taskType, - Map config, - Set platformArchitectures - ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(serviceSettingsMap); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); - } - - @Override - public HuggingFaceElserModel parsePersistedConfigWithSecrets( + protected HuggingFaceModel createModel( String modelId, TaskType taskType, - Map config, - Map secrets + Map serviceSettings, + @Nullable Map secretSettings, + String failureMessage ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - HuggingFaceElserSecretSettings secretSettings = HuggingFaceElserSecretSettings.fromMap(secretSettingsMap); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, 
secretSettings); - } - - @Override - public HuggingFaceElserModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - HuggingFaceElserServiceSettings serviceSettings = HuggingFaceElserServiceSettings.fromMap(serviceSettingsMap); - - return new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, null); - } - - @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { - if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) { - listener.onFailure( - new ElasticsearchStatusException( - TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME), - RestStatus.BAD_REQUEST - ) - ); - return; - } - - if (model instanceof HuggingFaceElserModel == false) { - listener.onFailure(new ElasticsearchException("The internal model was invalid")); - return; - } - - init(); - - HuggingFaceElserModel huggingFaceElserModel = (HuggingFaceElserModel) model; - HuggingFaceElserAction action = new HuggingFaceElserAction(sender.get(), huggingFaceElserModel, serviceComponents.get()); - - action.execute(input, listener); - } - - @Override - public void start(Model model, ActionListener listener) { - init(); - listener.onResponse(true); - } - - @Override - public void close() throws IOException { - IOUtils.closeWhileHandlingException(sender.get()); - } - - private void init() { - sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); - sender.get().start(); + return switch (taskType) { + case SPARSE_EMBEDDING -> new HuggingFaceElserModel(modelId, taskType, NAME, serviceSettings, secretSettings); + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); + }; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index 4b8213909f66b..dd185c4ca8385 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -9,10 +9,8 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,9 +19,8 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSettings { public static final String NAME = "hugging_face_elser_service_settings"; @@ -31,20 +28,7 @@ public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSetting static final String URL = "url"; public static HuggingFaceElserServiceSettings fromMap(Map map) { - ValidationException validationException = new ValidationException(); - - String 
parsedUrl = extractRequiredString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - URI uri = convertToUri(parsedUrl, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - return new HuggingFaceElserServiceSettings(uri); + return new HuggingFaceElserServiceSettings(extractUri(map, URL)); } public HuggingFaceElserServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java new file mode 100644 index 0000000000000..1f2e545a06901 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.embeddings; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.huggingface.HuggingFaceActionVisitor; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import java.net.URI; +import java.util.Map; + +public class HuggingFaceEmbeddingsModel extends HuggingFaceModel { + public HuggingFaceEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + Map serviceSettings, + @Nullable Map secrets + ) { + this(modelId, taskType, service, HuggingFaceServiceSettings.fromMap(serviceSettings), DefaultSecretSettings.fromMap(secrets)); + } + + // Should only be used directly for testing + HuggingFaceEmbeddingsModel( + String modelId, + TaskType taskType, + String service, + HuggingFaceServiceSettings serviceSettings, + @Nullable DefaultSecretSettings secrets + ) { + super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secrets)); + } + + @Override + public HuggingFaceServiceSettings getServiceSettings() { + return (HuggingFaceServiceSettings) super.getServiceSettings(); + } + + @Override + public DefaultSecretSettings getSecretSettings() { + return (DefaultSecretSettings) super.getSecretSettings(); + } + + @Override + public URI getUri() { + return getServiceSettings().uri(); + } + + @Override + public SecureString getApiKey() { + return getSecretSettings().apiKey(); + } + + @Override + public ExecutableAction 
accept(HuggingFaceActionVisitor creator) { + return creator.create(this); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java deleted file mode 100644 index b5b6b5df99862..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandler.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.openai; - -import org.apache.http.RequestLine; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; -import org.elasticsearch.xpack.inference.external.http.retry.RetryException; -import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; - -public class OpenAiResponseHandler implements ResponseHandler { - - protected final String requestType; - private final CheckedFunction parseFunction; - - public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { - this.requestType = Objects.requireNonNull(requestType); - 
this.parseFunction = Objects.requireNonNull(parseFunction); - } - - @Override - public void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequestBase request, HttpResult result) - throws RetryException { - checkForFailureStatusCode(request, result); - checkForEmptyBody(throttlerManager, logger, request, result); - } - - @Override - public InferenceServiceResults parseResult(HttpResult result) throws RetryException { - try { - return parseFunction.apply(result); - } catch (Exception e) { - throw new RetryException(true, e); - } - } - - @Override - public String getRequestType() { - return requestType; - } - - /** - * Validates the status code throws an RetryException if not in the range [200, 300). - * - * The OpenAI API error codes are document at https://platform.openai.com/docs/guides/error-codes/api-errors - * @param request The http request - * @param result The http response and body - * @throws RetryException Throws if status code is {@code >= 300 or < 200 } - */ - static void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throws RetryException { - int statusCode = result.response().getStatusLine().getStatusCode(); - if (statusCode >= 200 && statusCode < 300) { - return; - } - - // handle error codes - if (statusCode >= 500) { - String errorMsg = buildErrorMessageWithResponse( - "Received a server error status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else if (statusCode == 429) { - String errorMsg = buildErrorMessageWithResponse( - "Received a rate limit status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); // TODO back off and retry - } else if (statusCode == 401) { - String errorMsg = buildErrorMessageWithResponse( - "Received a authentication error status code for request [%s] status [%s]", - request.getRequestLine(), - 
statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else if (statusCode >= 300 && statusCode < 400) { - String errorMsg = buildErrorMessageWithResponse( - "Unhandled redirection for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } else { - String errorMsg = buildErrorMessageWithResponse( - "Received an unsuccessful status code for request [%s] status [%s]", - request.getRequestLine(), - statusCode, - result - ); - throw new RetryException(false, errorMsg); - } - } - - static String buildErrorMessageWithResponse(String baseMessage, RequestLine requestLine, int statusCode, HttpResult response) { - var errorEntity = OpenAiErrorResponseEntity.fromResponse(response); - - if (errorEntity == null) { - return format(baseMessage, requestLine, statusCode); - } else { - var base = format(baseMessage, requestLine, statusCode); - return base + ". Error message: [" + errorEntity.getErrorMessage() + "]"; - } - - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 0a7ae147d13d1..acf7b84bfccb1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -12,8 +12,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.core.IOUtils; -import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -22,31 +21,24 @@ import 
org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; -import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; -public class OpenAiService implements InferenceService { +public class OpenAiService extends SenderService { public static final String NAME = "openai"; - private final SetOnce factory; - private final SetOnce serviceComponents; - private final AtomicReference sender = new AtomicReference<>(); - public OpenAiService(SetOnce factory, SetOnce serviceComponents) { - this.factory = Objects.requireNonNull(factory); - this.serviceComponents = Objects.requireNonNull(serviceComponents); + super(factory, serviceComponents); } @Override @@ -80,12 +72,12 @@ public OpenAiModel parseRequestConfig( return model; } - private OpenAiModel createModel( + private static OpenAiModel 
createModel( String modelId, TaskType taskType, Map serviceSettings, Map taskSettings, - Map secretSettings, + @Nullable Map secretSettings, String failureMessage ) { return switch (taskType) { @@ -105,22 +97,14 @@ public OpenAiModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - OpenAiModel model = createModel( + return createModel( modelId, taskType, serviceSettingsMap, taskSettingsMap, secretSettingsMap, - format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, NAME) + parsePersistedConfigErrorMsg(modelId, NAME) ); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(secrets, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); - throwIfNotEmptyMap(secretSettingsMap, NAME); - - return model; } @Override @@ -128,63 +112,28 @@ public OpenAiModel parsePersistedConfig(String modelId, TaskType taskType, Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - OpenAiModel model = createModel( - modelId, - taskType, - serviceSettingsMap, - taskSettingsMap, - null, - format("Failed to parse stored model [%s] for [%s] service, please delete and add the service again", modelId, NAME) - ); - - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); - - return model; + return createModel(modelId, taskType, serviceSettingsMap, taskSettingsMap, null, parsePersistedConfigErrorMsg(modelId, NAME)); } @Override - public void infer(Model model, List input, Map taskSettings, ActionListener listener) { - init(); - + public void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener 
listener + ) { if (model instanceof OpenAiModel == false) { - listener.onFailure( - new ElasticsearchStatusException( - format( - "The internal model was invalid, please delete the service [%s] with id [%s] and add it again.", - model.getConfigurations().getService(), - model.getConfigurations().getModelId() - ), - RestStatus.INTERNAL_SERVER_ERROR - ) - ); + listener.onFailure(createInvalidModelException(model)); return; } OpenAiModel openAiModel = (OpenAiModel) model; - var actionCreator = new OpenAiActionCreator(sender.get(), serviceComponents.get()); + var actionCreator = new OpenAiActionCreator(getSender(), getServiceComponents()); var action = openAiModel.accept(actionCreator, taskSettings); action.execute(input, listener); } - @Override - public void start(Model model, ActionListener listener) { - init(); - listener.onResponse(true); - } - - @Override - public void close() throws IOException { - IOUtils.closeWhileHandlingException(sender.get()); - } - - private void init() { - sender.updateAndGet(current -> Objects.requireNonNullElseGet(current, () -> factory.get().createSender(name()))); - sender.get().start(); - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_OPENAI_ADDED; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java index adb947b01691e..6c7ff17e352d5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java @@ -21,9 +21,9 @@ import java.net.URI; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static 
org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; /** * Defines the base settings for interacting with OpenAI. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 210b84d8ca31e..5e2c352d88a01 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -25,7 +26,7 @@ public OpenAiEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, - Map secrets + @Nullable Map secrets ) { this( modelId, @@ -44,7 +45,7 @@ public OpenAiEmbeddingsModel( String service, OpenAiServiceSettings serviceSettings, OpenAiEmbeddingsTaskSettings taskSettings, - DefaultSecretSettings secrets + @Nullable DefaultSecretSettings secrets ) { super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secrets)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 4933717192266..7df57516ad632 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -12,7 +12,7 @@ import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.MODEL; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.USER; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index 05781c03f9cb0..45a9ce1cabbc3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -21,8 +21,8 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; /** * Defines the task 
settings for the openai service. diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java index 3ad29d56a88be..2689634d75d98 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.xcontent.XContentBuilder; @@ -21,7 +22,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; /** * Contains secret settings that are common to all services. 
@@ -32,7 +33,11 @@ public record DefaultSecretSettings(SecureString apiKey) implements SecretSettin static final String API_KEY = "api_key"; - public static DefaultSecretSettings fromMap(Map map) { + public static DefaultSecretSettings fromMap(@Nullable Map map) { + if (map == null) { + return null; + } + ValidationException validationException = new ValidationException(); SecureString secureApiToken = extractRequiredSecureString(map, API_KEY, ModelSecrets.SECRET_SETTINGS, validationException); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java similarity index 50% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index 65d665b71f8ee..c66f967de508f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.huggingface; +package org.elasticsearch.xpack.inference.external.action.huggingface; import org.apache.http.HttpHeaders; import org.elasticsearch.ElasticsearchException; @@ -20,9 +20,12 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; import org.junit.After; import org.junit.Before; @@ -37,19 +40,16 @@ import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; -import static org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceElserRequestTests.createRequest; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; -import static org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static 
org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -public class HuggingFaceClientTests extends ESTestCase { +public class HuggingFaceActionCreatorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); private final MockWebServer webServer = new MockWebServer(); private ThreadPool threadPool; @@ -59,7 +59,7 @@ public class HuggingFaceClientTests extends ESTestCase { public void init() throws Exception { webServer.start(); threadPool = createThreadPool(inferenceUtilityPool()); - clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mockThrottlerManager()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); } @After @@ -70,7 +70,7 @@ public void shutdown() throws IOException { } @SuppressWarnings("unchecked") - public void testSend_SuccessfulResponse() throws IOException, URISyntaxException { + public void testExecute_ReturnsSuccessfulResponse_ForElserAction() throws IOException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); try (var sender = senderFactory.createSender("test_service")) { @@ -85,19 +85,22 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( - sender, - new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) - ); + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool)); + var action = actionCreator.create(model); PlainActionFuture listener = 
new PlainActionFuture<>(); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + action.execute(List.of("abc"), listener); var result = listener.actionGet(TIMEOUT); assertThat( result.asMap(), - is(buildExpectation(List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)))) + is( + SparseEmbeddingResultsTests.buildExpectation( + List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)) + ) + ) ); assertThat(webServer.requests(), hasSize(1)); @@ -117,7 +120,7 @@ public void testSend_SuccessfulResponse() throws IOException, URISyntaxException } @SuppressWarnings("unchecked") - public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyntaxException { + public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOException, URISyntaxException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); try (var sender = senderFactory.createSender("test_service")) { @@ -140,7 +143,8 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator( sender, new ServiceComponents( threadPool, @@ -149,9 +153,10 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) ) ); + var action = actionCreator.create(model); PlainActionFuture listener = new PlainActionFuture<>(); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + action.execute(List.of("abc"), listener); var thrownException = 
expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); assertThat( @@ -175,20 +180,108 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn } } - public void testSend_ThrowsException() throws URISyntaxException, IOException { - var sender = mock(Sender.class); - doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + @SuppressWarnings("unchecked") + public void testExecute_ReturnsSuccessfulResponse_ForEmbeddingsAction() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123, + 0.123 + ] + ] + { + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool)); + var action = actionCreator.create(model); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var result = listener.actionGet(TIMEOUT); - HuggingFaceClient huggingFaceClient = new HuggingFaceClient( - sender, - new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY) - ); - PlainActionFuture listener = new PlainActionFuture<>(); + assertThat(result.asMap(), is(TextEmbeddingResultsTests.buildExpectation(List.of(List.of(-0.0123F, 0.123F))))); - huggingFaceClient.send(createRequest(getUrl(webServer), "secret", "abc"), listener); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + 
assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat(thrownException.getMessage(), is("failed")); + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); + } } + @SuppressWarnings("unchecked") + public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws IOException, URISyntaxException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // this will fail because the only valid formats are {"embeddings": [[...]]} or [[...]] + String responseJson = """ + [ + { + "embeddings": [ + [ + -0.0123, + 0.123 + ] + ] + { + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + var actionCreator = new HuggingFaceActionCreator( + sender, + new ServiceComponents( + threadPool, + mockThrottlerManager(), + // timeout as zero for no retries + buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + ) + ); + var action = actionCreator.create(model); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + + assertThat(webServer.requests(), 
hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(1)); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + var inputList = (List) requestMap.get("inputs"); + assertThat(inputList, contains("abc")); + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java new file mode 100644 index 0000000000000..7b1301a75a1fd --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.huggingface; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests.createModel; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class HuggingFaceActionTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private static final String URl = "http://localhost:12345"; + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { + 
var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + }).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); + + var action = createAction(URl, sender); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abc"), listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Hugging Face test action request to [%s]", URl))); + } + + private HuggingFaceAction createAction(String url, Sender sender) { + var model = createModel(url, "secret"); + + return new HuggingFaceAction( + sender, + model, + new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY), + new 
AlwaysRetryingResponseHandler("test", (result) -> null), + "test action" + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java deleted file mode 100644 index 6e1c2d528c467..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceElserActionTests.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.action.huggingface; - -import org.apache.http.HttpHeaders; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.http.MockResponse; -import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.external.http.HttpClientManager; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.external.http.sender.Sender; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; 
-import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; -import org.elasticsearch.xpack.inference.services.ServiceComponents; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; -import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserServiceSettings; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; -import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; -import static org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests.buildExpectation; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.doThrow; -import static org.mockito.Mockito.mock; - -public class HuggingFaceElserActionTests extends ESTestCase { - private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private final MockWebServer webServer = new MockWebServer(); - private ThreadPool threadPool; - private HttpClientManager clientManager; - - @Before - public void init() throws Exception { - webServer.start(); - threadPool = createThreadPool(inferenceUtilityPool()); - clientManager = HttpClientManager.create(Settings.EMPTY, 
threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); - } - - @After - public void shutdown() throws IOException { - clientManager.close(); - terminate(threadPool); - webServer.close(); - } - - @SuppressWarnings("unchecked") - public void testExecute_ReturnsSuccessfulResponse() throws IOException { - var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); - - try (var sender = senderFactory.createSender("test_service")) { - sender.start(); - - String responseJson = """ - [ - { - ".": 0.133155956864357 - } - ] - """; - webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var result = listener.actionGet(TIMEOUT); - - assertThat( - result.asMap(), - is(buildExpectation(List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)))) - ); - - assertThat(webServer.requests(), hasSize(1)); - assertNull(webServer.requests().get(0).getUri().getQuery()); - assertThat( - webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), - equalTo(XContentType.JSON.mediaTypeWithoutParameters()) - ); - assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); - - var requestMap = entityAsMap(webServer.requests().get(0).getBody()); - assertThat(requestMap.size(), is(1)); - assertThat(requestMap.get("inputs"), instanceOf(List.class)); - var inputList = (List) requestMap.get("inputs"); - assertThat(inputList, contains("abc")); - } - } - - public void testExecute_ThrowsURISyntaxException_ForInvalidUrl() throws IOException { - try (var sender = mock(Sender.class)) { - var thrownException = expectThrows(IllegalArgumentException.class, () -> createAction("^^", sender)); - assertThat(thrownException.getMessage(), is("unable to parse url 
[^^]")); - } - } - - public void testExecute_ThrowsElasticsearchException_WhenSenderThrows() { - var sender = mock(Sender.class); - doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is("failed")); - } - - public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { - var sender = mock(Sender.class); - - doAnswer(invocation -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onFailure(new IllegalStateException("failed")); - - return Void.TYPE; - }).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); - } - - public void testExecute_ThrowsException() { - var sender = mock(Sender.class); - doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any()); - - var action = createAction(getUrl(webServer), sender); - - PlainActionFuture listener = new PlainActionFuture<>(); - action.execute(List.of("abc"), listener); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat(thrownException.getMessage(), is(format("Failed to send ELSER Hugging Face request to [%s]", getUrl(webServer)))); - } - - private HuggingFaceElserAction createAction(String url, Sender sender) { - var model = new 
HuggingFaceElserModel( - "id", - TaskType.SPARSE_EMBEDDING, - "service", - new HuggingFaceElserServiceSettings(url), - new HuggingFaceElserSecretSettings(new SecureString("secret".toCharArray())) - ); - - return new HuggingFaceElserAction(sender, model, new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY)); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java new file mode 100644 index 0000000000000..b7095979b0fa5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandlerTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.retry; + +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler.toRestStatus; +import static org.hamcrest.core.Is.is; + +public class BaseResponseHandlerTests extends ESTestCase { + public void testToRestStatus_ReturnsBadRequest_WhenStatusIs500() { + assertThat(toRestStatus(500), is(RestStatus.BAD_REQUEST)); + } + + public void testToRestStatus_ReturnsBadRequest_WhenStatusIs501() { + assertThat(toRestStatus(501), is(RestStatus.BAD_REQUEST)); + } + + public void testToRestStatus_ReturnsStatusCodeValue_WhenStatusIs200() { + assertThat(toRestStatus(200), is(RestStatus.OK)); + } + + public void testToRestStatus_ReturnsBadRequest_WhenStatusIsUnknown() { + assertThat(toRestStatus(1000), is(RestStatus.BAD_REQUEST)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java new file mode 100644 index 0000000000000..9bebddc9ebb87 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.huggingface; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HuggingFaceResponseHandlerTests extends ESTestCase { + + public void testCheckForFailureStatusCode() { + var statusLine = mock(StatusLine.class); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var httpRequest = mock(HttpRequestBase.class); + + var httpResult = new HttpResult(httpResponse, new byte[] {}); + + var handler = new HuggingFaceResponseHandler("", result -> null); + + // 200 ok + when(statusLine.getStatusCode()).thenReturn(200); + handler.checkForFailureStatusCode(httpRequest, httpResult); + // 503 + when(statusLine.getStatusCode()).thenReturn(503); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [503]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 502 + when(statusLine.getStatusCode()).thenReturn(502); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + 
retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [502]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 429 + when(statusLine.getStatusCode()).thenReturn(429); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [429]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 401 + when(statusLine.getStatusCode()).thenReturn(401); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an authentication error status code for request [null] status [401]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); + // 300 + when(statusLine.getStatusCode()).thenReturn(300); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request [null] status [300]")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); + // 402 + when(statusLine.getStatusCode()).thenReturn(402); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + 
containsString("Received an unsuccessful status code for request [null] status [402]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java new file mode 100644 index 0000000000000..56495b053e172 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.openai; + +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.HttpRequestBase; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.RetryException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class OpenAiResponseHandlerTests extends ESTestCase { + + public void testCheckForFailureStatusCode() { + var statusLine = mock(StatusLine.class); + + var httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + var httpRequest = mock(HttpRequestBase.class); + + var httpResult = new HttpResult(httpResponse, new byte[] {}); + + var 
handler = new OpenAiResponseHandler("", result -> null); + + // 200 ok + when(statusLine.getStatusCode()).thenReturn(200); + handler.checkForFailureStatusCode(httpRequest, httpResult); + // 503 + when(statusLine.getStatusCode()).thenReturn(503); + var retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a server error status code for request [null] status [503]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 429 + when(statusLine.getStatusCode()).thenReturn(429); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received a rate limit status code for request [null] status [429]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 401 + when(statusLine.getStatusCode()).thenReturn(401); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an authentication error status code for request [null] status [401]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.UNAUTHORIZED)); + // 300 + when(statusLine.getStatusCode()).thenReturn(300); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Unhandled redirection for request 
[null] status [300]")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.MULTIPLE_CHOICES)); + // 402 + when(statusLine.getStatusCode()).thenReturn(402); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an unsuccessful status code for request [null] status [402]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java index 06279e9c89da6..738ab3d155bc4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java @@ -21,7 +21,7 @@ public class HuggingFaceElserRequestEntityTests extends ESTestCase { public void testXContent() throws IOException { - var entity = new HuggingFaceElserRequestEntity(List.of("abc")); + var entity = new HuggingFaceInferenceRequestEntity(List.of("abc")); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java index 2a8ce9a46e498..1a5eb7fb8845c 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java @@ -44,10 +44,10 @@ public void testCreateRequest() throws URISyntaxException, IOException { assertThat(inputList, contains("abc")); } - public static HuggingFaceElserRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { + public static HuggingFaceInferenceRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); - var entity = new HuggingFaceElserRequestEntity(List.of(input)); + var entity = new HuggingFaceInferenceRequestEntity(List.of(input)); - return new HuggingFaceElserRequest(account, entity); + return new HuggingFaceInferenceRequest(account, entity); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java new file mode 100644 index 0000000000000..e3f14ad085761 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java @@ -0,0 +1,339 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class HuggingFaceEmbeddingsResponseEntityTests extends ESTestCase { + public void testFromResponse_CreatesResultsForASingleItem_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 0.014539449, + -0.015288644 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F))))); + } + + public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 0.014539449, + -0.015288644 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F))))); + } + + public void testFromResponse_CreatesResultsForMultipleItems_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 0.014539449, + -0.015288644 + ], + [ + 0.0123, + -0.0123 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F)), + new TextEmbeddingResults.Embedding(List.of(0.0123F, -0.0123F)) + ) + ) + ); + } + + public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 0.014539449, + -0.015288644 + ], + [ + 0.0123, + -0.0123 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.embeddings(), + is( + List.of( + new TextEmbeddingResults.Embedding(List.of(0.014539449F, -0.015288644F)), + new TextEmbeddingResults.Embedding(List.of(0.0123F, -0.0123F)) + ) + ) + ); + } + + public void testFromResponse_FailsWhenArrayOfObjects() { + String responseJson = """ + [ + {} + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() { + String responseJson = """ + { + "not_embeddings": [ + [ + 0.014539449, + -0.015288644 + ] + ] + } + """; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [embeddings] in Hugging Face embeddings response")); + } + + public void 
testFromResponse_FailsWhenEmbeddingsFieldNotAnArray() { + String responseJson = """ + { + "embeddings": { + "a": [ + 0.014539449, + -0.015288644 + ] + } + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_ARRAY] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAString_ArrayFormat() { + String responseJson = """ + [ + [ + "abc" + ] + ] + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAString_ObjectFormat() { + String responseJson = """ + { + "embeddings": [ + [ + "abc" + ] + ] + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [VALUE_STRING]") + ); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 1 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new 
TextEmbeddingResults.Embedding(List.of(1.0F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 1 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(1.0F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() throws IOException { + String responseJson = """ + [ + [ + 40294967295 + ] + ] + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(4.0294965E10F))))); + } + + public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() throws IOException { + String responseJson = """ + { + "embeddings": [ + [ + 40294967295 + ] + ] + } + """; + + TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(parsedResults.embeddings(), is(List.of(new TextEmbeddingResults.Embedding(List.of(4.0294965E10F))))); + } + + public void testFromResponse_FailsWhenEmbeddingValueIsAnObject_ObjectFormat() { + String responseJson = """ + { + "embeddings": [ + [ + {} + ] + ] + } + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type 
[VALUE_NUMBER] but found [START_OBJECT]") + ); + } + + public void testFromResponse_FailsWithUnknownToken() { + String responseJson = """ + "super" + """; + + var thrownException = expectThrows( + ParsingException.class, + () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to parse object: unexpected token [VALUE_STRING] found")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java new file mode 100644 index 0000000000000..ed381de844731 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceErrorResponseEntityTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.response.huggingface; + +import org.apache.http.HttpResponse; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.nio.charset.StandardCharsets; + +import static org.mockito.Mockito.mock; + +public class HuggingFaceErrorResponseEntityTests extends ESTestCase { + public void testFromResponse() { + String responseJson = """ + { + "error": "A valid user token is required" + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNotNull(errorMessage); + assertEquals("A valid user token is required", errorMessage.getErrorMessage()); + } + + public void testFromResponse_noMessage() { + String responseJson = """ + { + "error": { + "type": "invalid_request_error" + } + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNull(errorMessage); + } + + public void testFromResponse_noError() { + String responseJson = """ + { + "something": { + "not": "relevant" + } + } + """; + + HuggingFaceErrorResponseEntity errorMessage = HuggingFaceErrorResponseEntity.fromResponse( + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + assertNull(errorMessage); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 56d8171640b53..2301be28f62c4 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -317,37 +317,4 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") ); } - - public void testFromResponse_FailsWhenIsMissingFinalClosingBracket() { - String responseJson = """ - { - "object": "list", - "data": [ - { - "object": "embedding", - "index": 0, - "embedding": [ - {} - ] - } - ], - "model": "text-embedding-ada-002-v2", - "usage": { - "prompt_tokens": 8, - "total_tokens": 8 - } - """; - - var thrownException = expectThrows( - ParsingException.class, - () -> OpenAiEmbeddingsResponseEntity.fromResponse( - new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) - ) - ); - - assertThat( - thrownException.getMessage(), - is("Failed to parse object: expecting token of type [VALUE_NUMBER] but found [START_OBJECT]") - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java index 4e3465e24c951..4dc6c4190f92c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiErrorResponseEntityTests.java @@ -49,7 +49,7 @@ public void testFromResponse_noMessage() { assertNull(errorMessage); } - public void testFromResponse_noErro() { + public void testFromResponse_noError() { String responseJson = """ { "something": { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java index 43928da8ed3b3..0f37ac87fe45a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java @@ -19,7 +19,7 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.MapParsingUtils; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; @@ -73,12 +73,10 @@ public record TestServiceSettings(String model) implements ServiceSettings { public static TestServiceSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); - String model = MapParsingUtils.removeAsType(map, "model", String.class); + String model = ServiceUtils.removeAsType(map, "model", String.class); if (model == null) { - validationException.addValidationError( - MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS) - ); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS)); } if (validationException.validationErrors().isEmpty() == false) { @@ -121,7 +119,7 @@ public record TestTaskSettings(Integer temperature) implements TaskSettings { private static final String NAME = "test_task_settings"; public static TestTaskSettings fromMap(Map map) { - Integer temperature = MapParsingUtils.removeAsType(map, "temperature", Integer.class); + Integer temperature = ServiceUtils.removeAsType(map, "temperature", Integer.class); return new TestTaskSettings(temperature); } @@ -162,10 +160,10 @@ public record TestSecretSettings(String apiKey) implements 
SecretSettings { public static TestSecretSettings fromMap(Map map) { ValidationException validationException = new ValidationException(); - String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class); + String apiKey = ServiceUtils.removeAsType(map, "api_key", String.class); if (apiKey == null) { - validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS)); } if (validationException.validationErrors().isEmpty() == false) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java new file mode 100644 index 0000000000000..fb61a86c7b9c4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -0,0 +1,143 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class SenderServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testStart_InitializesTheSender() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + try (var service = new 
TestSenderService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.start(mock(Model.class), listener); + + listener.actionGet(TIMEOUT); + verify(sender, times(1)).start(); + verify(factory, times(1)).createSender(anyString()); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + try (var service = new TestSenderService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.start(mock(Model.class), listener); + listener.actionGet(TIMEOUT); + + service.start(mock(Model.class), listener); + listener.actionGet(TIMEOUT); + + verify(factory, times(1)).createSender(anyString()); + verify(sender, times(2)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + private static final class TestSenderService extends SenderService { + TestSenderService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + protected void doInfer( + Model model, + List input, + Map taskSettings, + ActionListener listener + ) { + + } + + @Override + public String name() { + return "test service"; + } + + @Override + public Model parseRequestConfig(String modelId, TaskType taskType, Map config, Set platfromArchitectures) { + return null; + } + + @Override + public Model parsePersistedConfigWithSecrets( + String modelId, + TaskType taskType, + Map config, + Map secrets + ) { + return null; + } + + @Override + public Model parsePersistedConfig(String modelId, TaskType taskType, Map 
config) { + return null; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java similarity index 88% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 9ff23ea38541d..eb54745806a68 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/MapParsingUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -14,34 +14,34 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.convertToUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.createUri; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredSecureString; -import static org.elasticsearch.xpack.inference.services.MapParsingUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static 
org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -public class MapParsingUtilsTests extends ESTestCase { +public class ServiceUtilsTests extends ESTestCase { public void testRemoveAsTypeWithTheCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); - Integer i = MapParsingUtils.removeAsType(map, "a", Integer.class); + Integer i = ServiceUtils.removeAsType(map, "a", Integer.class); assertEquals(Integer.valueOf(5), i); assertNull(map.get("a")); // field has been removed - String str = MapParsingUtils.removeAsType(map, "b", String.class); + String str = ServiceUtils.removeAsType(map, "b", String.class); assertEquals("a string", str); assertNull(map.get("b")); - Boolean b = MapParsingUtils.removeAsType(map, "c", Boolean.class); + Boolean b = ServiceUtils.removeAsType(map, "c", Boolean.class); assertEquals(Boolean.TRUE, b); assertNull(map.get("c")); - Double d = MapParsingUtils.removeAsType(map, "d", Double.class); + Double d = ServiceUtils.removeAsType(map, "d", Double.class); assertEquals(Double.valueOf(1.0), d); assertNull(map.get("d")); @@ -51,20 +51,20 @@ public void testRemoveAsTypeWithTheCorrectType() { public void testRemoveAsTypeWithInCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); - var e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "a", String.class)); + var e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "a", String.class)); assertThat( e.getMessage(), containsString("field [a] is not of the expected type. 
The value [5] cannot be converted to a [String]") ); - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "b", Boolean.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "b", Boolean.class)); assertThat( e.getMessage(), containsString("field [b] is not of the expected type. The value [a string] cannot be converted to a [Boolean]") ); assertNull(map.get("b")); - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "c", Integer.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "c", Integer.class)); assertThat( e.getMessage(), containsString("field [c] is not of the expected type. The value [true] cannot be converted to a [Integer]") @@ -72,7 +72,7 @@ public void testRemoveAsTypeWithInCorrectType() { assertNull(map.get("c")); // cannot convert double to integer - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "d", Integer.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "d", Integer.class)); assertThat( e.getMessage(), containsString("field [d] is not of the expected type. The value [5.0] cannot be converted to a [Integer]") @@ -80,7 +80,7 @@ public void testRemoveAsTypeWithInCorrectType() { assertNull(map.get("d")); // cannot convert integer to double - e = expectThrows(ElasticsearchStatusException.class, () -> MapParsingUtils.removeAsType(map, "e", Double.class)); + e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "e", Double.class)); assertThat( e.getMessage(), containsString("field [e] is not of the expected type. 
The value [5] cannot be converted to a [Double]") @@ -92,7 +92,7 @@ public void testRemoveAsTypeWithInCorrectType() { public void testRemoveAsTypeMissingReturnsNull() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); - assertNull(MapParsingUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); + assertNull(ServiceUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); assertThat(map.entrySet(), hasSize(3)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java new file mode 100644 index 0000000000000..59abda79abad0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/Utils.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class Utils { + public static Model getInvalidModel(String modelId, String serviceName) { + var mockConfigs = mock(ModelConfigurations.class); + when(mockConfigs.getModelId()).thenReturn(modelId); + when(mockConfigs.getService()).thenReturn(serviceName); + + var mockModel = mock(Model.class); + when(mockModel.getConfigurations()).thenReturn(mockConfigs); + + return mockModel; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java new file mode 100644 index 0000000000000..d5eb6e76b622b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class HuggingFaceBaseServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private ThreadPool threadPool; + + @Before + public void init() throws Exception { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() throws IOException { + terminate(threadPool); + } + + public void testInfer_ThrowsErrorWhenModelIsNotHuggingFaceModel() 
throws IOException { + var sender = mock(Sender.class); + + var factory = mock(HttpRequestSenderFactory.class); + when(factory.createSender(anyString())).thenReturn(sender); + + var mockModel = getInvalidModel("model_id", "service_name"); + + try (var service = new TestService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(mockModel, List.of(""), new HashMap<>(), listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("The internal model was invalid, please delete the service [service_name] with id [model_id] and add it again.") + ); + + verify(factory, times(1)).createSender(anyString()); + verify(sender, times(1)).start(); + } + + verify(sender, times(1)).close(); + verifyNoMoreInteractions(factory); + verifyNoMoreInteractions(sender); + } + + private static final class TestService extends HuggingFaceBaseService { + + TestService(SetOnce factory, SetOnce serviceComponents) { + super(factory, serviceComponents); + } + + @Override + public String name() { + return "test"; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersion.current(); + } + + @Override + protected HuggingFaceModel createModel( + String modelId, + TaskType taskType, + Map serviceSettings, + Map secretSettings, + String failureMessage + ) { + return null; + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java new file mode 100644 index 0000000000000..001d869f67a5c --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class HuggingFaceServiceSettingsTests extends AbstractWireSerializingTestCase { + + public static HuggingFaceServiceSettings createRandom() { + return new HuggingFaceServiceSettings(randomAlphaOfLength(15)); + } + + public void testFromMap() { + var url = "https://www.abc.com"; + var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))); + + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + } + + public void testFromMap_MissingUrl_ThrowsError() { + var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceServiceSettings.fromMap(new HashMap<>())); + + assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] does not contain the required setting [%s];", + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + public void testFromMap_EmptyUrl_ThrowsError() { + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, ""))) + ); + + 
assertThat( + thrownException.getMessage(), + containsString( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + public void testFromMap_InvalidUrl_ThrowsError() { + var url = "https://www.abc^.com"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))) + ); + + assertThat( + thrownException.getMessage(), + is( + Strings.format( + "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", + url, + HuggingFaceServiceSettings.URL + ) + ) + ); + } + + @Override + protected Writeable.Reader instanceReader() { + return HuggingFaceServiceSettings::new; + } + + @Override + protected HuggingFaceServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected HuggingFaceServiceSettings mutateInstance(HuggingFaceServiceSettings instance) throws IOException { + return createRandom(); + } + + public static Map getServiceSettingsMap(String url) { + var map = new HashMap(); + + map.put(HuggingFaceServiceSettings.URL, url); + + return map; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java new file mode 100644 index 0000000000000..fa31d026b16f5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -0,0 +1,589 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface; + +import org.apache.http.HttpHeaders; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; +import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; +import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModelTests; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static 
org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap; +import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; + +public class HuggingFaceServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), + Set.of() + ); + + assertThat(model, 
instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParseRequestConfig_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var model = service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), + Set.of() + ); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var config = getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + config.put("extra_key", "value"); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMap() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + 
) + ) { + var serviceSettings = getServiceSettingsMap("url"); + serviceSettings.put("extra_key", "value"); + + var config = getRequestConfigMap(serviceSettings, getSecretSettingsMap("secret")); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var config = getRequestConfigMap(getServiceSettingsMap("url"), secretSettingsMap); + + var thrownException = expectThrows( + ElasticsearchStatusException.class, + () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ); + + assertThat( + thrownException.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + 
assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.SPARSE_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() 
throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var secretSettingsMap = getSecretSettingsMap("secret"); + secretSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); + persistedConfig.secrets.put("extra_key", "value"); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new 
SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url"); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = new HashMap(); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), taskSettingsMap, getSecretSettingsMap("secret")); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesAnEmbeddingsModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = 
getPersistedConfigMap(getServiceSettingsMap("url")); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnElserModel() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); + + var model = service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInServiceSettings() throws 
IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url"); + serviceSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(serviceSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new HuggingFaceService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = new HashMap(); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url"), taskSettingsMap, null); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testInfer_SendsEmbeddingsRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new HuggingFaceService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123, + 0.0123 + ] + ] + { + """; + 
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), Matchers.is(buildExpectation(List.of(List.of(-0.0123F, 0.0123F))))); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), Matchers.is(1)); + assertThat(requestMap.get("inputs"), Matchers.is(List.of("abc"))); + } + } + + public void testInfer_SendsElserRequest() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new HuggingFaceService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + [ + { + ".": 0.133155956864357 + } + ] + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceElserModelTests.createModel(getUrl(webServer), "secret"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer(model, List.of("abc"), new HashMap<>(), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat( + result.asMap(), + Matchers.is( + SparseEmbeddingResultsTests.buildExpectation( + List.of(new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f), false)) + ) + ) + ); + assertThat(webServer.requests(), hasSize(1)); + 
assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat( + webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), + equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), Matchers.is(1)); + assertThat(requestMap.get("inputs"), Matchers.is(List.of("abc"))); + } + } + + private Map getRequestConfigMap(Map serviceSettings, Map secretSettings) { + var builtServiceSettings = new HashMap<>(); + builtServiceSettings.putAll(serviceSettings); + builtServiceSettings.putAll(secretSettings); + + return new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, builtServiceSettings)); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap(Map serviceSettings) { + return getPersistedConfigMap(serviceSettings, Map.of(), null); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap( + Map serviceSettings, + @Nullable Map secretSettings + ) { + return getPersistedConfigMap(serviceSettings, Map.of(), secretSettings); + } + + private HuggingFaceServiceTests.PeristedConfig getPersistedConfigMap( + Map serviceSettings, + Map taskSettings, + Map secretSettings + ) { + + var secrets = secretSettings == null ? 
null : new HashMap(Map.of(ModelSecrets.SECRET_SETTINGS, secretSettings)); + + return new HuggingFaceServiceTests.PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + secrets + ); + } + + private record PeristedConfig(Map config, Map secrets) {} +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java new file mode 100644 index 0000000000000..89ad9fd5543df --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModelTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.huggingface.elser; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class HuggingFaceElserModelTests extends ESTestCase { + + public void testThrowsURISyntaxException_ForInvalidUrl() { + var thrownException = expectThrows(IllegalArgumentException.class, () -> createModel("^^", "secret")); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + + public static HuggingFaceElserModel createModel(String url, String apiKey) { + return new HuggingFaceElserModel( + "id", + TaskType.SPARSE_EMBEDDING, + "service", + new HuggingFaceElserServiceSettings(url), + new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java index c3aa628705195..2b8281da8db13 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java @@ -33,6 +33,10 @@ public void testFromMap() { assertThat(new HuggingFaceElserSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); } + public void testFromMap_ReturnsNull_WhenMapIsNull() { + assertNull(HuggingFaceElserSecretSettings.fromMap(null)); + } + public void testFromMap_MissingApiKey_ThrowsError() { var thrownException = expectThrows(ValidationException.class, () -> HuggingFaceElserSecretSettings.fromMap(new HashMap<>())); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java new file mode 100644 index 0000000000000..6cf70189cea74 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.huggingface.embeddings; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; + +import static org.hamcrest.Matchers.is; + +public class HuggingFaceEmbeddingsModelTests extends ESTestCase { + + public void testThrowsURISyntaxException_ForInvalidUrl() { + var thrownException = expectThrows(IllegalArgumentException.class, () -> createModel("^^", "secret")); + assertThat(thrownException.getMessage(), is("unable to parse url [^^]")); + } + + public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(url), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java deleted file mode 100644 index cdef3914ec7c7..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiResponseHandlerTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.openai; - -import org.apache.http.HttpResponse; -import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpRequestBase; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.http.retry.RetryException; - -import static org.hamcrest.Matchers.containsString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -public class OpenAiResponseHandlerTests extends ESTestCase { - - public void testCheckForFailureStatusCode() { - var statusLine = mock(StatusLine.class); - when(statusLine.getStatusCode()).thenReturn(200).thenReturn(503).thenReturn(429).thenReturn(401).thenReturn(300).thenReturn(402); - - var httpResponse = mock(HttpResponse.class); - when(httpResponse.getStatusLine()).thenReturn(statusLine); - - var httpRequest = mock(HttpRequestBase.class); - - var httpResult = new HttpResult(httpResponse, new byte[] {}); - - // 200 ok - OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult); - // 503 - var retryException = expectThrows( - RetryException.class, - () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult) - ); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received a 
server error status code for request [null] status [503]")); - // 429 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received a rate limit status code for request [null] status [429]")); - // 401 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat( - retryException.getMessage(), - containsString("Received a authentication error status code for request [null] status [401]") - ); - // 300 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Unhandled redirection for request [null] status [300]")); - // 402 - retryException = expectThrows(RetryException.class, () -> OpenAiResponseHandler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat(retryException.getMessage(), containsString("Received an unsuccessful status code for request [null] status [402]")); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java index 9fbcc3bec7a60..9e20286c1d0ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java @@ -126,7 +126,6 @@ protected OpenAiServiceSettings mutateInstance(OpenAiServiceSettings instance) t } 
public static Map getServiceSettingsMap(@Nullable String url, @Nullable String org) { - var map = new HashMap(); if (url != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 0d57e90dcd31b..a82600c537663 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -48,6 +47,7 @@ import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettingsTests.getServiceSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; @@ -260,7 +260,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOException { + public void 
testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModel() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -291,7 +291,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOE } } - public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -321,7 +321,7 @@ public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() thro } } - public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -352,7 +352,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUr } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -366,24 +366,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws ); persistedConfig.config().put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - 
thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecretsSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExistsInSecretsSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -399,24 +400,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecretsSettings secretSettingsMap ); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + 
assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecrets() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSecrets() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -430,24 +432,25 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInSecrets() throw ); persistedConfig.secrets.put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInServiceSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -463,24 +466,25 @@ public void 
testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInServiceSettings getSecretSettingsMap("secret") ); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } - public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInTaskSettings() throws IOException { + public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { try ( var service = new OpenAiService( new SetOnce<>(mock(HttpRequestSenderFactory.class)), @@ -496,64 +500,160 @@ public void testParsePersistedConfig_ThrowsWhenAnExtraKeyExistsInTaskSettings() getSecretSettingsMap("secret") ); + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), 
is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + var thrownException = expectThrows( ElasticsearchStatusException.class, - () -> service.parsePersistedConfigWithSecrets( - "id", - TaskType.TEXT_EMBEDDING, - persistedConfig.config(), - persistedConfig.secrets() - ) + () -> service.parsePersistedConfig("id", TaskType.SPARSE_EMBEDDING, persistedConfig.config()) ); assertThat( thrownException.getMessage(), - is("Model configuration contains settings 
[{extra_key=value}] unknown to the [openai] service") + is("Failed to parse stored model [id] for [openai] service, please delete and add the service again") ); } } - public void testStart_InitializesTheSender() throws IOException { - var sender = mock(Sender.class); + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlOrganization() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap(null, null), getTaskSettingsMap("model", null)); - var factory = mock(HttpRequestSenderFactory.class); - when(factory.createSender(anyString())).thenReturn(sender); + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - try (var service = new OpenAiService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { - PlainActionFuture listener = new PlainActionFuture<>(); - service.start(mock(Model.class), listener); + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - listener.actionGet(TIMEOUT); - verify(sender, times(1)).start(); - verify(factory, times(1)).createSender(anyString()); + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().uri()); + assertNull(embeddingsModel.getServiceSettings().organizationId()); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertNull(embeddingsModel.getTaskSettings().user()); + assertNull(embeddingsModel.getSecretSettings()); } + } - verify(sender, times(1)).close(); - verifyNoMoreInteractions(factory); - verifyNoMoreInteractions(sender); + public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new 
SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user")); + persistedConfig.config().put("extra_key", "value"); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } } - public void testStart_CallingStartTwiceKeepsSameSenderReference() throws IOException { - var sender = mock(Sender.class); + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettings() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var serviceSettingsMap = getServiceSettingsMap("url", "org"); + serviceSettingsMap.put("extra_key", "value"); - var factory = mock(HttpRequestSenderFactory.class); - when(factory.createSender(anyString())).thenReturn(sender); + var persistedConfig = getPersistedConfigMap(serviceSettingsMap, getTaskSettingsMap("model", "user")); - try (var service = new OpenAiService(new SetOnce<>(factory), new SetOnce<>(createWithEmptySettings(threadPool)))) { - PlainActionFuture listener = new PlainActionFuture<>(); - service.start(mock(Model.class), listener); - listener.actionGet(TIMEOUT); + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); - service.start(mock(Model.class), listener); - listener.actionGet(TIMEOUT); + assertThat(model, 
instanceOf(OpenAiEmbeddingsModel.class)); - verify(factory, times(1)).createSender(anyString()); - verify(sender, times(2)).start(); + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); } + } - verify(sender, times(1)).close(); - verifyNoMoreInteractions(factory); - verifyNoMoreInteractions(sender); + public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings() throws IOException { + try ( + var service = new OpenAiService( + new SetOnce<>(mock(HttpRequestSenderFactory.class)), + new SetOnce<>(createWithEmptySettings(threadPool)) + ) + ) { + var taskSettingsMap = getTaskSettingsMap("model", "user"); + taskSettingsMap.put("extra_key", "value"); + + var persistedConfig = getPersistedConfigMap(getServiceSettingsMap("url", "org"), taskSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getSecretSettings()); + } } public void testInfer_ThrowsErrorWhenModelIsNotOpenAiModel() throws IOException { @@ -653,23 +753,12 @@ public void testInfer_UnauthorisedResponse() throws IOException { service.infer(model, List.of("abc"), new HashMap<>(), listener); var error = expectThrows(ElasticsearchException.class, () 
-> listener.actionGet(TIMEOUT)); - assertThat(error.getMessage(), containsString("Received a authentication error status code for request")); + assertThat(error.getMessage(), containsString("Received an authentication error status code for request")); assertThat(error.getMessage(), containsString("Error message: [Incorrect API key provided:]")); assertThat(webServer.requests(), hasSize(1)); } } - private static Model getInvalidModel(String modelId, String serviceName) { - var mockConfigs = mock(ModelConfigurations.class); - when(mockConfigs.getModelId()).thenReturn(modelId); - when(mockConfigs.getService()).thenReturn(serviceName); - - var mockModel = mock(Model.class); - when(mockModel.getConfigurations()).thenReturn(mockConfigs); - - return mockModel; - } - private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, @@ -696,5 +785,13 @@ private PeristedConfig getPersistedConfigMap( ); } + private PeristedConfig getPersistedConfigMap(Map serviceSettings, Map taskSettings) { + + return new PeristedConfig( + new HashMap<>(Map.of(ModelConfigurations.SERVICE_SETTINGS, serviceSettings, ModelConfigurations.TASK_SETTINGS, taskSettings)), + null + ); + } + private record PeristedConfig(Map config, Map secrets) {} } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java index 2fd952fbbdda4..bd7a3ef4dcf03 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java @@ -33,6 +33,10 @@ public void testFromMap() { assertThat(new DefaultSecretSettings(new SecureString(apiKey.toCharArray())), is(serviceSettings)); } + public void testFromMap_ReturnsNull_WhenMapIsNull() 
{ + assertNull(DefaultSecretSettings.fromMap(null)); + } + public void testFromMap_MissingApiKey_ThrowsError() { var thrownException = expectThrows(ValidationException.class, () -> DefaultSecretSettings.fromMap(new HashMap<>())); From fcd923902d6395455e179b7df5a4e705240cf2a8 Mon Sep 17 00:00:00 2001 From: Volodymyr Krasnikov <129072588+volodk85@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:32:44 -0800 Subject: [PATCH 162/263] Enable debug logging on the test (#102793) On a successful executions test expects to get exception `expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet());` which is thrown here, line L401 https://github.com/elastic/elasticsearch/blob/b18b5cba60540b6335ba9c69b1ec96b50f2a082a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java#L395-L401 Apparently the condition `if (shardSearchFailures.length == getNumShards())` does not hold sometimes and execution thread passes through to the next search phase. This PR enables debug logging to get more insights on possible root cause. Also, it fixes test code, there were missing assignments of `searchShardsResponse` sothat entire `if` block was never assessed. 
Related to #97878 --- ...pshotsCanMatchOnCoordinatorIntegTests.java | 28 ++++++++++++++----- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index 32c031f80177d..844e6099460b2 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotAction; @@ -559,6 +560,10 @@ public void testQueryPhaseIsExecutedInAnAvailableNodeWhenAllShardsCanBeSkipped() * Can match against searchable snapshots is tested via both the Search API and the SearchShards (transport-only) API. * The latter is a way to do only a can-match rather than all search phases. 
*/ + @TestIssueLogging( + issueUrl = "https://github.com/elastic/elasticsearch/issues/97878", + value = "org.elasticsearch.snapshots:DEBUG,org.elasticsearch.indices.recovery:DEBUG,org.elasticsearch.action.search:DEBUG" + ) public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCoordinatingNode() throws Exception { internalCluster().startMasterOnlyNode(); internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); @@ -622,7 +627,18 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo // All shards failed, since all shards are unassigned and the IndexMetadata min/max timestamp // is not available yet - expectThrows(SearchPhaseExecutionException.class, () -> client().search(request).actionGet()); + expectThrows(SearchPhaseExecutionException.class, () -> { + SearchResponse response = client().search(request).actionGet(); + logger.info( + "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", + response.getHits().getTotalHits().value, + response.getSuccessfulShards(), + response.getFailedShards(), + response.getSkippedShards(), + response.getTotalShards() + ); + fail("This search call is expected to throw an exception but it did not"); + }); // test with SearchShards API boolean allowPartialSearchResults = false; @@ -639,15 +655,13 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo { SearchShardsResponse searchShardsResponse = null; try { - client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); } catch (SearchPhaseExecutionException e) { // ignore as this is expected to happen } if (searchShardsResponse != null) { - if (searchShardsResponse != null) { - for (SearchShardsGroup group : searchShardsResponse.getGroups()) { - assertFalse("no shard should be marked as skipped", 
group.skipped()); - } + for (SearchShardsGroup group : searchShardsResponse.getGroups()) { + assertFalse("no shard should be marked as skipped", group.skipped()); } } } @@ -680,7 +694,7 @@ public void testSearchableSnapshotShardsThatHaveMatchingDataAreNotSkippedOnTheCo { SearchShardsResponse searchShardsResponse = null; try { - client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); } catch (SearchPhaseExecutionException e) { // ignore as this is expected to happen } From 76a6dd618b180f3873130791307b47d3faef8fe6 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 1 Dec 2023 14:49:28 -0800 Subject: [PATCH 163/263] New QA module for heap attack tests (#102833) The heap attack test suite is very special. It deliberately tries to take down Elasticsearch testing instances. When one of Elasticsearch testing instance is terminated, other tests will fail. This PR avoids such noise by adding a new QA module for only heap attack tests. 
--- .../internal/RestrictedBuildApiService.java | 1 + .../esql/qa/server/heap-attack/build.gradle | 19 ++++++++++++++ .../esql/qa/heap_attack}/HeapAttackIT.java | 25 +++++++++---------- .../esql/qa/server/multi-node/build.gradle | 2 +- 4 files changed, 33 insertions(+), 14 deletions(-) create mode 100644 x-pack/plugin/esql/qa/server/heap-attack/build.gradle rename x-pack/plugin/esql/qa/server/{single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node => heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack}/HeapAttackIT.java (97%) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index cafa02941d77c..2d5dc65a43fae 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -124,6 +124,7 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:correctness"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:eql:qa:mixed-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:security"); + map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:heap-attack"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:esql:qa:server:single-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:fleet:qa:rest"); diff --git a/x-pack/plugin/esql/qa/server/heap-attack/build.gradle b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle new file mode 100644 index 0000000000000..de88fdecf2b14 --- /dev/null +++ b/x-pack/plugin/esql/qa/server/heap-attack/build.gradle @@ -0,0 +1,19 @@ +apply plugin: 
'elasticsearch.legacy-yaml-rest-test' + +dependencies { + javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) +} + +restResources { + restApi { + include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich' + } +} + +testClusters.configureEach { + numberOfNodes = 1 + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.monitoring.collection.enabled', 'true' + setting 'xpack.security.enabled', 'false' +} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java similarity index 97% rename from x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java rename to x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java index 31d0a7646e1b7..2cc13117a299f 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.qa.single_node; +package org.elasticsearch.xpack.esql.qa.heap_attack; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; @@ -40,6 +40,7 @@ import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; /** @@ -472,25 +473,23 @@ private void initIndex(String name, String bulk) throws IOException { Request request = new Request("POST", "/" + name + "/_refresh"); Response response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); request = new Request("POST", "/" + name + "/_forcemerge"); request.addParameter("max_num_segments", "1"); response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); request = new Request("POST", "/" + name + "/_refresh"); response = client().performRequest(request); - assertThat( - EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), - equalTo("{\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0}}") - ); + assertWriteResponse(response); + } + + @SuppressWarnings("unchecked") + private static void assertWriteResponse(Response response) throws IOException { + Map shards = (Map) entityAsMap(response).get("_shards"); + assertThat((int) shards.get("successful"), greaterThanOrEqualTo(1)); + assertThat(shards.get("failed"), equalTo(0)); } @Before diff --git a/x-pack/plugin/esql/qa/server/multi-node/build.gradle b/x-pack/plugin/esql/qa/server/multi-node/build.gradle index 1b62fdea2671c..300ed4df92bc2 100644 --- 
a/x-pack/plugin/esql/qa/server/multi-node/build.gradle +++ b/x-pack/plugin/esql/qa/server/multi-node/build.gradle @@ -6,7 +6,7 @@ dependencies { restResources { restApi { - include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich' + include '_common', 'bulk', 'indices', 'esql', 'xpack' } } From 624909b2840b76a310c00f4b70644a953d63ab1b Mon Sep 17 00:00:00 2001 From: David Turner Date: Sun, 3 Dec 2023 01:15:49 +0100 Subject: [PATCH 164/263] AwaitsFix for #102899 --- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 0b3b4fae82324..2b686ef1cb61d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -768,6 +768,7 @@ public void testMaxDimsByteVector() throws IOException { assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102899") public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, From 7e24080fb26a88d7b1a0b897ef425317251747d5 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Sun, 3 Dec 2023 02:08:12 +0100 Subject: [PATCH 165/263] Suppress gradle welcome messages (#102898) There are not usefuil for us --- gradle.properties | 1 + 1 file changed, 1 insertion(+) diff --git a/gradle.properties b/gradle.properties index 64cb394206e66..745fb4f9e51ae 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,3 +1,4 @@ +org.gradle.welcome=never org.gradle.warning.mode=none org.gradle.parallel=true # We need to declare --add-exports to make spotless working seamlessly with jdk16 From 
9018f58954231358881bd03b36e3d88f92a35db8 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sun, 3 Dec 2023 19:25:03 +0100 Subject: [PATCH 166/263] Fix failing to release ref-count after resolving ActionListener (#102900) Fixing a couple of spots that I found by making `SearchResponse` actually ref-counted, where we missed decrementing a ref-count after passing a just constructed object to a listener. Added short-cut utility for this to `ActionListener` because this pattern is already all over the place and will become even more common shortly as the search response ref-counting work is progressing. --- .../elasticsearch/action/ActionListener.java | 13 +++++++++ .../elasticsearch/action/ActionRunnable.java | 7 +---- .../search/AbstractSearchAsyncAction.java | 2 +- .../search/SearchScrollAsyncAction.java | 3 +- .../search/TransportMultiSearchAction.java | 11 ++----- .../action/search/TransportSearchAction.java | 6 ++-- .../action/search/ExpandSearchPhaseTests.java | 29 +++++++------------ .../search/FetchLookupFieldsPhaseTests.java | 7 +---- .../search/MultiSearchActionTookTests.java | 26 ++++++++--------- .../SearchQueryThenFetchAsyncActionTests.java | 8 +---- .../xpack/search/AsyncSearchTask.java | 6 ++-- .../GetCcrRestoreFileChunkAction.java | 7 +---- .../compute/OwningChannelActionListener.java | 6 +--- .../persistence/JobResultsProviderTests.java | 14 ++++----- .../action/TransportRollupSearchAction.java | 2 +- 15 files changed, 60 insertions(+), 87 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index b0e18d5ef9b55..5017f0af0007c 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.CheckedRunnable; +import 
org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import java.util.ArrayList; @@ -309,6 +310,18 @@ static void completeWith(ActionListener listener, CheckedSu } } + /** + * Shorthand for resolving given {@code listener} with given {@code response} and decrementing the response's ref count by one + * afterwards. + */ + static void respondAndRelease(ActionListener listener, R response) { + try { + listener.onResponse(response); + } finally { + response.decRef(); + } + } + /** * @return A listener which (if assertions are enabled) wraps around the given delegate and asserts that it is only called once. */ diff --git a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java index 7feabf7e0241f..7c0879941af89 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRunnable.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRunnable.java @@ -70,12 +70,7 @@ public static ActionRunnable supplyAndDecRef( return wrap(listener, new CheckedConsumer<>() { @Override public void accept(ActionListener l) throws Exception { - var res = supplier.get(); - try { - l.onResponse(res); - } finally { - res.decRef(); - } + ActionListener.respondAndRelease(l, supplier.get()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 82c2f020a0962..d821764e788b7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -700,7 +700,7 @@ public void sendSearchResponse(InternalSearchResponse internalSearchResponse, At searchContextId = null; } } - listener.onResponse(buildSearchResponse(internalSearchResponse, failures, scrollId, searchContextId)); + ActionListener.respondAndRelease(listener, 
buildSearchResponse(internalSearchResponse, failures, scrollId, searchContextId)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index fc1ccfb00d6ce..5681bda8b2741 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -247,7 +247,8 @@ protected final void sendResponse( if (request.scroll() != null) { scrollId = request.scrollId(); } - listener.onResponse( + ActionListener.respondAndRelease( + listener, new SearchResponse( internalResponse, scrollId, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index a7d971069f96d..1fc9bca607285 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -186,15 +186,10 @@ private void handleResponse(final int responseSlot, final MultiSearchResponse.It } private void finish() { - final var response = new MultiSearchResponse( - responses.toArray(new MultiSearchResponse.Item[responses.length()]), - buildTookInMillis() + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse(responses.toArray(new MultiSearchResponse.Item[responses.length()]), buildTookInMillis()) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } /** diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 38d448a8a9372..9010fa1ea0e75 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -549,8 +549,8 @@ public void onResponse(SearchResponse searchResponse) { searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases() ); - - listener.onResponse( + ActionListener.respondAndRelease( + listener, new SearchResponse( internalSearchResponse, searchResponse.getScrollId(), @@ -571,7 +571,7 @@ public void onFailure(Exception e) { logCCSError(failure, clusterAlias, skipUnavailable); ccsClusterInfoUpdate(failure, clusters, clusterAlias, skipUnavailable); if (skipUnavailable) { - listener.onResponse(SearchResponse.empty(timeProvider::buildTookInMillis, clusters)); + ActionListener.respondAndRelease(listener, SearchResponse.empty(timeProvider::buildTookInMillis, clusters)); } else { listener.onFailure(wrapRemoteClusterFailure(clusterAlias, e)); } diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 126d09663a169..f8a22ec04fb15 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -102,15 +102,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); } - var response = new MultiSearchResponse( - mSearchResponses.toArray(new MultiSearchResponse.Item[0]), - randomIntBetween(1, 10000) + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse(mSearchResponses.toArray(new MultiSearchResponse.Item[0]), randomIntBetween(1, 10000)) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } }; @@ -170,17 +165,15 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL ShardSearchFailure.EMPTY_ARRAY, 
SearchResponse.Clusters.EMPTY ); - var response = new MultiSearchResponse( - new MultiSearchResponse.Item[] { - new MultiSearchResponse.Item(null, new RuntimeException("boom")), - new MultiSearchResponse.Item(searchResponse, null) }, - randomIntBetween(1, 10000) + ActionListener.respondAndRelease( + listener, + new MultiSearchResponse( + new MultiSearchResponse.Item[] { + new MultiSearchResponse.Item(null, new RuntimeException("boom")), + new MultiSearchResponse.Item(searchResponse, null) }, + randomIntBetween(1, 10000) + ) ); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java index 215293517a467..38409752c7e7d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java @@ -119,12 +119,7 @@ void sendExecuteMultiSearch( null ); } - var response = new MultiSearchResponse(responses, randomNonNegativeLong()); - try { - listener.onResponse(response); - } finally { - response.decRef(); - } + ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong())); } }; diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index f1867b223760d..9b1ed6eee1028 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -147,21 +147,19 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - var resp = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, 
- ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + ActionListener.respondAndRelease( + listener, + new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) ); - try { - listener.onResponse(resp); - } finally { - resp.decRef(); - } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 3097376de7a41..a973fa20851db 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -157,13 +157,7 @@ public void sendExecuteQuery( queryResult.size(1); successfulOps.incrementAndGet(); queryResult.incRef(); - new Thread(() -> { - try { - listener.onResponse(queryResult); - } finally { - queryResult.decRef(); - } - }).start(); + new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); } finally { queryResult.decRef(); } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index d445a012ecee9..8851d27fb087d 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -201,7 +201,7 @@ public void addCompletionListener(ActionListener listener, } } if (executeImmediately) { - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } } @@ -238,7 +238,7 @@ private void internalAddCompletionListener(ActionListener l if (hasRun.compareAndSet(false, true)) { // timeout occurred 
before completion removeCompletionListener(id); - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } }, waitForCompletion, threadPool.generic()); } catch (Exception exc) { @@ -255,7 +255,7 @@ private void internalAddCompletionListener(ActionListener l } } if (executeImmediately) { - listener.onResponse(getResponseWithHeaders()); + ActionListener.respondAndRelease(listener, getResponseWithHeaders()); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java index 7aab281f4f7ed..53751343f0783 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/GetCcrRestoreFileChunkAction.java @@ -85,12 +85,7 @@ protected void doExecute( try (CcrRestoreSourceService.SessionReader sessionReader = restoreSourceService.getSessionReader(sessionUUID)) { long offsetAfterRead = sessionReader.readFileBytes(fileName, reference); long offsetBeforeRead = offsetAfterRead - reference.length(); - var chunk = new GetCcrRestoreFileChunkResponse(offsetBeforeRead, reference); - try { - listener.onResponse(chunk); - } finally { - chunk.decRef(); - } + ActionListener.respondAndRelease(listener, new GetCcrRestoreFileChunkResponse(offsetBeforeRead, reference)); } } catch (IOException e) { listener.onFailure(e); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java index 6512d80859163..50a20ee6ee73d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java @@ -28,11 +28,7 @@ public OwningChannelActionListener(TransportChannel channel) { @Override public void onResponse(Response response) { - try { - listener.onResponse(response); - } finally { - response.decRef(); - } + ActionListener.respondAndRelease(listener, response); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 0fe693490d466..39f02f71642ed 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -948,15 +948,13 @@ private Client getMockedClient(Consumer queryBuilderConsumer, Sear queryBuilderConsumer.accept(multiSearchRequest.requests().get(0).source().query()); @SuppressWarnings("unchecked") ActionListener actionListener = (ActionListener) invocationOnMock.getArguments()[1]; - MultiSearchResponse mresponse = new MultiSearchResponse( - new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, - randomNonNegativeLong() + ActionListener.respondAndRelease( + actionListener, + new MultiSearchResponse( + new MultiSearchResponse.Item[] { new MultiSearchResponse.Item(response, null) }, + randomNonNegativeLong() + ) ); - try { - actionListener.onResponse(mresponse); - } finally { - mresponse.decRef(); - } return null; }).when(client).multiSearch(any(), any()); doAnswer(invocationOnMock -> { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 9fe634a178179..ff167c5586dce 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -143,7 +143,7 @@ public AggregationReduceContext forFinalReduction() { ); } }; - listener.onResponse(processResponses(rollupSearchContext, msearchResponse, reduceContextBuilder)); + ActionListener.respondAndRelease(listener, processResponses(rollupSearchContext, msearchResponse, reduceContextBuilder)); }, listener::onFailure)); } From 759280c75856de312ec7b2a99c219cec4cc61b1b Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 08:07:45 +0100 Subject: [PATCH 167/263] [Connector API] Implement update last_sync endpoint (#102858) --- .../api/connector.last_sync.json | 39 +++ .../334_connector_update_last_sync_stats.yml | 62 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 32 ++- .../connector/ConnectorIndexService.java | 51 +++- .../connector/ConnectorSyncInfo.java | 54 ++-- ...estUpdateConnectorLastSyncStatsAction.java | 45 ++++ ...ortUpdateConnectorLastSyncStatsAction.java | 55 ++++ .../UpdateConnectorLastSyncStatsAction.java | 240 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 48 ++++ .../connector/ConnectorTestUtils.java | 4 +- ...StatsActionRequestBWCSerializingTests.java | 52 ++++ ...tatsActionResponseBWCSerializingTests.java | 43 ++++ .../xpack/security/operator/Constants.java | 1 + 14 files changed, 669 insertions(+), 62 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json new file mode 100644 index 0000000000000..43b7b078eef58 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -0,0 +1,39 @@ +{ + "connector.last_sync": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the stats of last sync in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_last_sync", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "Object with stats related to the last connector sync run.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml new file mode 100644 index 0000000000000..f9989b615bef6 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/334_connector_update_last_sync_stats.yml @@ -0,0 +1,62 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector +--- +"Update Connector Last Sync Stats": + - do: + connector.last_sync: + connector_id: test-connector + body: + last_sync_error: "oh no error" + last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { last_sync_error: "oh no error" } + - match: { last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" } + +--- +"Update Connector Last Sync Stats - Connector doesn't exist": + - do: + catch: "missing" + connector.last_sync: + connector_id: test-non-existent-connector + body: + last_sync_error: "oh no error" + last_access_control_sync_scheduled_at: "2023-05-25T12:30:00.000Z" + +--- +"Update Connector Filtering - Wrong datetime expression": + - do: + catch: "bad_request" + connector.last_sync: + connector_id: test-connector + body: + last_access_control_sync_scheduled_at: "this is not a timestamp" + + +--- +"Update Connector Filtering - Wrong status": + - do: + catch: "bad_request" + connector.last_sync: + connector_id: test-connector + body: + last_sync_status: "this 
is not a valid status" + + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 29758c3c334cc..2a53a46760868 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -60,10 +61,12 @@ import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; +import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -200,6 +203,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), + new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -265,6 +269,7 @@ public List getRestHandlers( new RestPutConnectorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), + new RestUpdateConnectorLastSyncStatsAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index f824009196648..45b906d815aee 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -287,45 +287,45 @@ public Connector(StreamInput in) throws IOException { ObjectParser.ValueType.STRING_OR_NULL ); - 
PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); - PARSER.declareString(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> ConnectorSyncStatus.connectorSyncStatus(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareField( optionalConstructorArg(), - (p, c) -> Instant.parse(p.text()), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), ConnectorSyncInfo.LAST_SYNCED_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareString(optionalConstructorArg(), NAME_FIELD); @@ -485,6 +485,10 @@ public Map getConfiguration() { return configuration; } + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + public Instant getLastSeen() { return lastSeen; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 9730a0217b942..d99ad28dc3970 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -33,6 +33,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import 
org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -203,12 +204,42 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ } /** - * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. + * Updates the lastSeen property of a {@link Connector}. * - * @param request Request for updating connector ingest pipeline property. + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * + * @param request Request for updating connector last sync stats properties. * @param listener Listener to respond to a successful response or an error. 
*/ - public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { + public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -233,12 +264,12 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques } /** - * Updates the {@link ConnectorScheduling} property of a {@link Connector}. + * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. * - * @param request The request for updating the connector's scheduling. - * @param listener The listener for handling responses, including successful updates or errors. + * @param request Request for updating connector ingest pipeline property. + * @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { + public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -263,12 +294,12 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } /** - * Updates the lastSeen property of a {@link Connector}. + * Updates the {@link ConnectorScheduling} property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. + * @param request The request for updating the connector's scheduling. * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java index 8f2002efff5b6..7daae030155b7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorSyncInfo.java @@ -90,51 +90,33 @@ public ConnectorSyncInfo(StreamInput in) throws IOException { this.lastSynced = in.readOptionalInstant(); } - static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); - static final ParseField LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); - static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); - static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); - static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); - static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); - static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); - static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); - static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); - static final ParseField 
LAST_SYNCED_FIELD = new ParseField("last_synced"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_ERROR = new ParseField("last_access_control_sync_error"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD = new ParseField("last_access_control_sync_status"); + public static final ParseField LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_access_control_sync_scheduled_at"); + public static final ParseField LAST_DELETED_DOCUMENT_COUNT_FIELD = new ParseField("last_deleted_document_count"); + public static final ParseField LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_incremental_sync_scheduled_at"); + public static final ParseField LAST_INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("last_indexed_document_count"); + public static final ParseField LAST_SYNC_ERROR_FIELD = new ParseField("last_sync_error"); + public static final ParseField LAST_SYNC_SCHEDULED_AT_FIELD = new ParseField("last_sync_scheduled_at"); + public static final ParseField LAST_SYNC_STATUS_FIELD = new ParseField("last_sync_status"); + public static final ParseField LAST_SYNCED_FIELD = new ParseField("last_synced"); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - - if (lastAccessControlSyncError != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); - } - if (lastAccessControlSyncStatus != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); - } - if (lastAccessControlSyncScheduledAt != null) { - builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); - } + builder.field(LAST_ACCESS_CONTROL_SYNC_ERROR.getPreferredName(), lastAccessControlSyncError); + builder.field(LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD.getPreferredName(), lastAccessControlSyncStatus); + 
builder.field(LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastAccessControlSyncScheduledAt); if (lastDeletedDocumentCount != null) { builder.field(LAST_DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastDeletedDocumentCount); } - if (lastIncrementalSyncScheduledAt != null) { - builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); - } + builder.field(LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastIncrementalSyncScheduledAt); if (lastIndexedDocumentCount != null) { builder.field(LAST_INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), lastIndexedDocumentCount); } - if (lastSyncError != null) { - builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); - } - if (lastSyncScheduledAt != null) { - builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); - } - if (lastSyncStatus != null) { - builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); - } - if (lastSynced != null) { - builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); - } - + builder.field(LAST_SYNC_ERROR_FIELD.getPreferredName(), lastSyncError); + builder.field(LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName(), lastSyncScheduledAt); + builder.field(LAST_SYNC_STATUS_FIELD.getPreferredName(), lastSyncStatus); + builder.field(LAST_SYNCED_FIELD.getPreferredName(), lastSynced); return builder; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..8e373ce48caf3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorLastSyncStatsAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_last_sync_stats_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_last_sync")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorLastSyncStatsAction.Request request = UpdateConnectorLastSyncStatsAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorLastSyncStatsAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorLastSyncStatsAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..9ec0105668fbc --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorLastSyncStatsAction extends HandledTransportAction< + UpdateConnectorLastSyncStatsAction.Request, + UpdateConnectorLastSyncStatsAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorLastSyncStatsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorLastSyncStatsAction.NAME, + transportService, + actionFilters, + UpdateConnectorLastSyncStatsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorLastSyncStatsAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorLastSyncStats( + request, + listener.map(r -> new 
UpdateConnectorLastSyncStatsAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java new file mode 100644 index 0000000000000..328831cf0b840 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java @@ -0,0 +1,240 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.ConnectorSyncInfo; +import 
org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorLastSyncStatsAction extends ActionType { + + public static final UpdateConnectorLastSyncStatsAction INSTANCE = new UpdateConnectorLastSyncStatsAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_last_sync_stats"; + + public UpdateConnectorLastSyncStatsAction() { + super(NAME, UpdateConnectorLastSyncStatsAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + private final ConnectorSyncInfo syncInfo; + + public Request(String connectorId, ConnectorSyncInfo syncInfo) { + this.connectorId = connectorId; + this.syncInfo = syncInfo; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new); + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("connector_update_last_sync_stats_request", false, ((args, connectorId) -> { + int i = 0; + return new UpdateConnectorLastSyncStatsAction.Request( + connectorId, + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + 
.setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) args[i++]) + .build() + ); + })); + + static { + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()), + ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : Instant.parse(p.text()), + ConnectorSyncInfo.LAST_SYNCED_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + } + + public static UpdateConnectorLastSyncStatsAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorLastSyncStatsAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorLastSyncStatsAction.Request fromXContent(XContentParser parser, String connectorId) + throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + syncInfo.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalWriteable(syncInfo); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(syncInfo, request.syncInfo); + } + + 
@Override + public int hashCode() { + return Objects.hash(connectorId, syncInfo); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index c93135942348a..e155cdfefbfa1 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; 
+import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -131,6 +132,27 @@ public void testUpdateConnectorLastSeen() throws Exception { } + public void testUpdateConnectorLastSyncStats() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); + + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request( + connector.getConnectorId(), + syncStats + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + + assertThat(syncStats, equalTo(indexedConnector.getSyncInfo())); + } + public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -302,6 +324,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request updateLastSyncStats) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorLastSyncStats(updateLastSyncStats, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + 
latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update last sync stats request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update last sync stats request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorPipeline(UpdateConnectorPipelineAction.Request updatePipeline) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index a0cf018142599..98d0112d8910f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -70,9 +70,9 @@ public static ConnectorSyncInfo getRandomConnectorSyncInfo() { return new ConnectorSyncInfo.Builder().setLastAccessControlSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setLastAccessControlSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastAccessControlSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) - .setLastDeletedDocumentCount(randomFrom(new Long[] { null, randomLong() })) + .setLastDeletedDocumentCount(randomLong()) .setLastIncrementalSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) - .setLastIndexedDocumentCount(randomFrom(new Long[] { null, randomLong() })) + .setLastIndexedDocumentCount(randomLong()) .setLastSyncError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) 
.setLastSyncScheduledAt(randomFrom(new Instant[] { null, Instant.ofEpochMilli(randomLong()) })) .setLastSyncStatus(randomFrom(new ConnectorSyncStatus[] { null, getRandomSyncStatus() })) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..0728a7b328eb4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSyncStatsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorLastSyncStatsAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSyncStatsAction.Request::new; + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorLastSyncStatsAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorSyncInfo()); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request mutateInstance(UpdateConnectorLastSyncStatsAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorLastSyncStatsAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Request mutateInstanceForVersion( + UpdateConnectorLastSyncStatsAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java new file mode 100644 index 
0000000000000..dd214e10699ef --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorLastSyncStatsAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorLastSyncStatsAction.Response::new; + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response createTestInstance() { + return new UpdateConnectorLastSyncStatsAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response mutateInstance(UpdateConnectorLastSyncStatsAction.Response instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorLastSyncStatsAction.Response mutateInstanceForVersion( + UpdateConnectorLastSyncStatsAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java 
b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index aa6c67798e3e3..5412e7d05f27f 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -129,6 +129,7 @@ public class Constants { "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", + "cluster:admin/xpack/connector/update_last_sync_stats", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/connector/sync_job/post", From 5a4d4b3c01195c99d6013bce31af7aeaa6693f4d Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 4 Dec 2023 09:13:42 +0100 Subject: [PATCH 168/263] Remove outdated spec (#102845) This spec can be removed as 7.15 was released --- .../test/indices.recovery/10_basic.yml | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml index ac3f379fb86cd..d7731c0073140 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.recovery/10_basic.yml @@ -1,8 +1,5 @@ --- "Indices recovery test": - - skip: - version: " - 7.14.99" - reason: recovery from snapshot bytes not available until 7.15 - do: indices.create: @@ -45,47 +42,6 @@ - gte: { test_1.shards.0.verify_index.total_time_in_millis: 0 } --- -"Indices recovery test without recovery from snapshot": -# to be removed once 7.15 is out. 
- - do: - indices.create: - index: test_1 - body: - settings: - index: - number_of_replicas: 0 - - - do: - cluster.health: - wait_for_status: green - - - do: - indices.recovery: - index: [test_1] - human: true - - - match: { test_1.shards.0.type: "EMPTY_STORE" } - - match: { test_1.shards.0.stage: "DONE" } - - match: { test_1.shards.0.primary: true } - - match: { test_1.shards.0.start_time: /^2\d\d\d-.+/ } - - match: { test_1.shards.0.target.ip: /^\d+\.\d+\.\d+\.\d+$/ } - - gte: { test_1.shards.0.index.files.total: 0 } - - gte: { test_1.shards.0.index.files.reused: 0 } - - gte: { test_1.shards.0.index.files.recovered: 0 } - - match: { test_1.shards.0.index.files.percent: /^\d+\.\d\%$/ } - - gte: { test_1.shards.0.index.size.total_in_bytes: 0 } - - gte: { test_1.shards.0.index.size.reused_in_bytes: 0 } - - gte: { test_1.shards.0.index.size.recovered_in_bytes: 0 } - - match: { test_1.shards.0.index.size.percent: /^\d+\.\d\%$/ } - - gte: { test_1.shards.0.index.source_throttle_time_in_millis: 0 } - - gte: { test_1.shards.0.index.target_throttle_time_in_millis: 0 } - - gte: { test_1.shards.0.translog.recovered: 0 } - - gte: { test_1.shards.0.translog.total: -1 } - - gte: { test_1.shards.0.translog.total_on_start: 0 } - - gte: { test_1.shards.0.translog.total_time_in_millis: 0 } - - gte: { test_1.shards.0.verify_index.check_index_time_in_millis: 0 } - - gte: { test_1.shards.0.verify_index.total_time_in_millis: 0 } ---- "Indices recovery test for closed index": - skip: version: " - 7.1.99" From aedbe683ae93116923672866d1bb635567a5f861 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 08:30:22 +0000 Subject: [PATCH 169/263] Fail S3 repository analysis on partial reads (#102840) Today when reading a blob from a S3 repository we will resume a download on a partial success. If this happens concurrently with a blob overwrite then we may resume the download against a blob with updated contents, which causes a checksum mismatch. 
A checksum mismatch during an overwrite suggests an atomicity failure, which can be misleading to users. With this commit we consider partial downloads during repository analysis as immediate errors instead, clarifying the repository problem. Relates #101100 --- docs/changelog/102840.yaml | 5 +++ .../s3/S3RetryingInputStream.java | 6 +++ .../s3/S3BlobContainerRetriesTests.java | 45 ++++++++++++++++++- .../AbstractBlobContainerRetriesTestCase.java | 17 ++++--- 4 files changed, 66 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/102840.yaml diff --git a/docs/changelog/102840.yaml b/docs/changelog/102840.yaml new file mode 100644 index 0000000000000..1d87cede632c9 --- /dev/null +++ b/docs/changelog/102840.yaml @@ -0,0 +1,5 @@ +pr: 102840 +summary: Fail S3 repository analysis on partial reads +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 6cad60f32de47..04eadba9f9f8f 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -166,6 +166,12 @@ private void ensureOpen() { } private void reopenStreamOrFail(IOException e) throws IOException { + if (purpose == OperationPurpose.REPOSITORY_ANALYSIS) { + logger.warn(() -> format(""" + failed reading [%s/%s] at offset [%s]""", blobStore.bucket(), blobKey, start + currentOffset), e); + throw e; + } + final int maxAttempts = blobStore.getMaxRetries() + 1; final long meaningfulProgressSize = Math.max(1L, blobStore.bufferSizeInBytes() / 100L); diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index a8a6d71928795..9ed68976aac8a 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -14,6 +14,7 @@ import com.sun.net.httpserver.HttpHandler; import org.apache.http.HttpStatus; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; @@ -36,6 +37,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; +import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.hamcrest.Matcher; import org.junit.After; @@ -519,7 +521,7 @@ public void handle(HttpExchange exchange) throws IOException { httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_max_retries"), new FlakyReadHandler()); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -535,12 +537,53 @@ public void handle(HttpExchange exchange) throws IOException { } } + public void testReadDoesNotRetryForRepositoryAnalysis() { + final int maxRetries = between(0, 5); + final int bufferSizeBytes = scaledRandomIntBetween( + 0, + randomFrom(1000, Math.toIntExact(S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY).getBytes())) + ); + final BlobContainer blobContainer = createBlobContainer(maxRetries, null, true, 
ByteSizeValue.ofBytes(bufferSizeBytes)); + + final byte[] bytes = randomBlobContent(); + + @SuppressForbidden(reason = "use a http server") + class FlakyReadHandler implements HttpHandler { + private int failureCount; + + @Override + public void handle(HttpExchange exchange) throws IOException { + if (failureCount != 0) { + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("failureCount=" + failureCount)); + } + failureCount += 1; + Streams.readFully(exchange.getRequestBody()); + sendIncompleteContent(exchange, bytes); + exchange.close(); + } + } + + httpServer.createContext(downloadStorageEndpoint(blobContainer, "read_blob_repo_analysis"), new FlakyReadHandler()); + + expectThrows(Exception.class, () -> { + try (InputStream inputStream = blobContainer.readBlob(OperationPurpose.REPOSITORY_ANALYSIS, "read_blob_repo_analysis")) { + final byte[] bytesRead = BytesReference.toBytes(Streams.readFully(inputStream)); + assertArrayEquals(Arrays.copyOfRange(bytes, 0, bytes.length), bytesRead); + } + }); + } + @Override protected Matcher getMaxRetriesMatcher(int maxRetries) { // some attempts make meaningful progress and do not count towards the max retry limit return allOf(greaterThanOrEqualTo(maxRetries), lessThanOrEqualTo(S3RetryingInputStream.MAX_SUPPRESSED_EXCEPTIONS)); } + @Override + protected OperationPurpose randomRetryingPurpose() { + return randomValueOtherThan(OperationPurpose.REPOSITORY_ANALYSIS, BlobStoreTestUtil::randomPurpose); + } + /** * Asserts that an InputStream is fully consumed, or aborted, when it is closed */ diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java index 4110472e8ef76..8d44c37fcd9f1 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/AbstractBlobContainerRetriesTestCase.java @@ -14,6 +14,7 @@ import org.apache.http.ConnectionClosedException; import org.apache.http.HttpStatus; import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeValue; @@ -146,7 +147,7 @@ public void testReadBlobWithRetries() throws Exception { } }); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_blob_max_retries")) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_blob_max_retries")) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -212,7 +213,7 @@ public void testReadRangeBlobWithRetries() throws Exception { final int position = randomIntBetween(0, bytes.length - 1); final int length = randomIntBetween(0, randomBoolean() ? bytes.length : Integer.MAX_VALUE); - try (InputStream inputStream = blobContainer.readBlob(randomPurpose(), "read_range_blob_max_retries", position, length)) { + try (InputStream inputStream = blobContainer.readBlob(randomRetryingPurpose(), "read_range_blob_max_retries", position, length)) { final int readLimit; final InputStream wrappedStream; if (randomBoolean()) { @@ -269,8 +270,8 @@ public void testReadBlobWithReadTimeouts() { exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") - : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", position, length) + ? 
blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") + : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", position, length) ) { Streams.readFully(stream); } @@ -289,6 +290,10 @@ protected org.hamcrest.Matcher getMaxRetriesMatcher(int maxRetries) { return equalTo(maxRetries); } + protected OperationPurpose randomRetryingPurpose() { + return randomPurpose(); + } + public void testReadBlobWithNoHttpResponse() { final TimeValue readTimeout = TimeValue.timeValueMillis(between(100, 200)); final BlobContainer blobContainer = createBlobContainer(randomInt(5), readTimeout, null, null); @@ -323,8 +328,8 @@ public void testReadBlobWithPrematureConnectionClose() { final Exception exception = expectThrows(Exception.class, () -> { try ( InputStream stream = randomBoolean() - ? blobContainer.readBlob(randomPurpose(), "read_blob_incomplete", 0, 1) - : blobContainer.readBlob(randomPurpose(), "read_blob_incomplete") + ? blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete", 0, 1) + : blobContainer.readBlob(randomRetryingPurpose(), "read_blob_incomplete") ) { Streams.readFully(stream); } From d73f1ac4837539ce749dd81eaa51af4f552dea56 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 11:39:17 +0000 Subject: [PATCH 170/263] AwaitsFix for #102920 --- .../action/admin/cluster/stats/SearchUsageStatsTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index 10419719a5ed1..cc4509500f9c1 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.apache.lucene.tests.util.LuceneTestCase; import 
org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; @@ -19,6 +20,7 @@ import java.util.List; import java.util.Map; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102920") // failing test is final, mute whole suite public class SearchUsageStatsTests extends AbstractWireSerializingTestCase { private static final List QUERY_TYPES = List.of( From e1fceae5e4c51233415c21049e3b45a3983e8ec2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 4 Dec 2023 12:31:40 +0000 Subject: [PATCH 171/263] Distinguish blob store ops for data and metadata (#102836) Today all snapshot operations have an operation purpose of `SNAPSHOT`, but in fact some repository implementations may want to configure operations against snapshot metadata differently from those against snapshot data. This commit divides the purpose into `SNAPSHOT_DATA` and `SNAPSHOT_METADATA` to support this distinction. Relates https://github.com/elastic/elasticsearch/issues/81352 --- .../s3/S3BlobStoreRepositoryTests.java | 35 +-- .../repositories/s3/S3BlobContainer.java | 3 + .../s3/S3BlobContainerRetriesTests.java | 3 +- .../BlobStoreRepositoryCleanupIT.java | 6 +- ...BlobStoreRepositoryOperationPurposeIT.java | 243 ++++++++++++++++++ .../common/blobstore/BlobContainer.java | 31 +++ .../common/blobstore/OperationPurpose.java | 3 +- .../common/blobstore/fs/FsBlobContainer.java | 6 + .../recovery/SnapshotFilesProvider.java | 2 +- .../blobstore/BlobStoreRepository.java | 67 +++-- .../blobstore/ChecksumBlobStoreFormat.java | 4 +- .../blobstore/fs/FsBlobContainerTests.java | 12 +- .../blobstore/BlobStoreRepositoryTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 6 +- .../AbstractThirdPartyRepositoryTestCase.java | 7 +- .../blobstore/BlobStoreTestUtil.java | 9 +- .../ESBlobStoreRepositoryIntegTestCase.java | 6 +- .../input/DirectBlobContainerIndexInput.java | 2 +- 
.../input/MetadataCachingIndexInput.java | 4 +- 19 files changed, 386 insertions(+), 67 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 5a445a1524da5..c76364f48c081 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -75,7 +75,7 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.repositories.RepositoriesModule.METRIC_REQUESTS_COUNT; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.allOf; @@ -85,8 +85,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -271,8 +269,12 @@ public void testMetrics() throws Exception { final List metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_COUNT)); assertThat( - statsCollectors.size(), - equalTo(metrics.stream().map(m -> 
m.attributes().get("operation")).collect(Collectors.toSet()).size()) + statsCollectors.keySet().stream().map(S3BlobStore.StatsKey::operation).collect(Collectors.toSet()), + equalTo( + metrics.stream() + .map(m -> S3BlobStore.Operation.parse((String) m.attributes().get("operation"))) + .collect(Collectors.toSet()) + ) ); metrics.forEach(metric -> { assertThat( @@ -303,23 +305,24 @@ public void testRequestStatsWithOperationPurposes() throws IOException { final String repoName = createRepository(randomRepositoryName()); final RepositoriesService repositoriesService = internalCluster().getCurrentMasterNodeInstance(RepositoriesService.class); final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repoName); - final BlobStore blobStore = repository.blobStore(); - assertThat(blobStore, instanceOf(BlobStoreWrapper.class)); - final BlobStore delegateBlobStore = ((BlobStoreWrapper) blobStore).delegate(); - assertThat(delegateBlobStore, instanceOf(S3BlobStore.class)); - final S3BlobStore.StatsCollectors statsCollectors = ((S3BlobStore) delegateBlobStore).getStatsCollectors(); + final BlobStoreWrapper blobStore = asInstanceOf(BlobStoreWrapper.class, repository.blobStore()); + final S3BlobStore delegateBlobStore = asInstanceOf(S3BlobStore.class, blobStore.delegate()); + final S3BlobStore.StatsCollectors statsCollectors = delegateBlobStore.getStatsCollectors(); - // Initial stats are collected with the default operation purpose + // Initial stats are collected for repository verification, which counts as SNAPSHOT_METADATA final Set allOperations = EnumSet.allOf(S3BlobStore.Operation.class) .stream() .map(S3BlobStore.Operation::getKey) .collect(Collectors.toUnmodifiableSet()); - statsCollectors.collectors.keySet().forEach(statsKey -> assertThat(statsKey.purpose(), is(OperationPurpose.SNAPSHOT))); + assertThat( + statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::purpose).collect(Collectors.toUnmodifiableSet()), + 
equalTo(Set.of(OperationPurpose.SNAPSHOT_METADATA)) + ); final Map initialStats = blobStore.stats(); assertThat(initialStats.keySet(), equalTo(allOperations)); // Collect more stats with an operation purpose other than the default - final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT, BlobStoreTestUtil::randomPurpose); + final OperationPurpose purpose = randomValueOtherThan(OperationPurpose.SNAPSHOT_METADATA, BlobStoreTestUtil::randomPurpose); final BlobPath blobPath = repository.basePath().add(randomAlphaOfLength(10)); final BlobContainer blobContainer = blobStore.blobContainer(blobPath); final BytesArray whatToWrite = new BytesArray(randomByteArrayOfLength(randomIntBetween(100, 1000))); @@ -332,7 +335,7 @@ public void testRequestStatsWithOperationPurposes() throws IOException { // Internal stats collection is fine-grained and records different purposes assertThat( statsCollectors.collectors.keySet().stream().map(S3BlobStore.StatsKey::purpose).collect(Collectors.toUnmodifiableSet()), - equalTo(Set.of(OperationPurpose.SNAPSHOT, purpose)) + equalTo(Set.of(OperationPurpose.SNAPSHOT_METADATA, purpose)) ); // The stats report aggregates over different purposes final Map newStats = blobStore.stats(); @@ -341,7 +344,7 @@ public void testRequestStatsWithOperationPurposes() throws IOException { final Set operationsSeenForTheNewPurpose = statsCollectors.collectors.keySet() .stream() - .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT) + .filter(sk -> sk.purpose() != OperationPurpose.SNAPSHOT_METADATA) .map(sk -> sk.operation().getKey()) .collect(Collectors.toUnmodifiableSet()); @@ -396,7 +399,7 @@ public void testEnforcedCooldownPeriod() throws IOException { () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlobAtomic( - randomPurpose(), + randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + modifiedRepositoryData.getGenId(), serialized, true diff --git 
a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 87b3c17bfd91c..93b8ef7e57389 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -129,6 +129,7 @@ public long readBlobPreferredLength() { @Override public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); assert inputStream.markSupported() : "No mark support on inputStream breaks the S3 SDK's ability to retry requests"; SocketAccess.doPrivilegedIOException(() -> { if (blobSize <= getLargeBlobThresholdInBytes()) { @@ -148,6 +149,7 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; final String absoluteBlobKey = buildKey(blobName); try ( AmazonS3Reference clientReference = blobStore.clientReference(); @@ -273,6 +275,7 @@ long getLargeBlobThresholdInBytes() { @Override public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); writeBlob(purpose, blobName, bytes, failIfAlreadyExists); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 9ed68976aac8a..b4b136338923f 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -57,6 +57,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.repositories.s3.S3ClientSettings.DISABLE_CHUNKED_ENCODING; import static org.elasticsearch.repositories.s3.S3ClientSettings.ENDPOINT_SETTING; @@ -446,7 +447,7 @@ public void testWriteLargeBlobStreaming() throws Exception { } }); - blobContainer.writeMetadataBlob(randomPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { + blobContainer.writeMetadataBlob(randomNonDataPurpose(), "write_large_blob_streaming", false, randomBoolean(), out -> { final byte[] buffer = new byte[16 * 1024]; long outstanding = blobSize; while (outstanding > 0) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java index 7886e628b26ad..bf937a9d57f02 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryCleanupIT.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFutureThrows; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -98,7 +98,7 @@ 
private ActionFuture startBlockedCleanup(String repoN garbageFuture, () -> repository.blobStore() .blobContainer(repository.basePath()) - .writeBlob(randomPurpose(), "snap-foo.dat", new BytesArray(new byte[1]), true) + .writeBlob(randomNonDataPurpose(), "snap-foo.dat", new BytesArray(new byte[1]), true) ) ); garbageFuture.get(); @@ -147,7 +147,7 @@ public void testCleanupOldIndexN() throws ExecutionException, InterruptedExcepti () -> repository.blobStore() .blobContainer(repository.basePath()) .writeBlob( - randomPurpose(), + randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + generation, new BytesArray(new byte[1]), true diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java new file mode 100644 index 0000000000000..91eb1dc6eb01b --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryOperationPurposeIT.java @@ -0,0 +1,243 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.repositories.blobstore; + +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.BlobMetadata; +import org.elasticsearch.common.blobstore.support.FilterBlobContainer; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; + +public class BlobStoreRepositoryOperationPurposeIT extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), TestPlugin.class); + } + + public void testSnapshotOperationPurposes() throws Exception { + // Perform some simple operations on the repository in order to exercise the checks that the purpose is set correctly for 
various + // operations + + final var repoName = randomIdentifier(); + createRepository(repoName, TestPlugin.ASSERTING_REPO_TYPE); + + final var count = between(1, 3); + + for (int i = 0; i < count; i++) { + createIndexWithContent("index-" + i); + createFullSnapshot(repoName, "snap-" + i); + } + + final var timeout = TimeValue.timeValueSeconds(10); + clusterAdmin().prepareCleanupRepository(repoName).get(timeout); + clusterAdmin().prepareCloneSnapshot(repoName, "snap-0", "clone-0").setIndices("index-0").get(timeout); + + // restart to ensure that the reads which happen when starting a node on a nonempty repository use the expected purposes + internalCluster().fullRestart(); + + clusterAdmin().prepareGetSnapshots(repoName).get(timeout); + + clusterAdmin().prepareRestoreSnapshot(repoName, "clone-0") + .setRenamePattern("index-0") + .setRenameReplacement("restored-0") + .setWaitForCompletion(true) + .get(timeout); + + for (int i = 0; i < count; i++) { + assertTrue(startDeleteSnapshot(repoName, "snap-" + i).get(10, TimeUnit.SECONDS).isAcknowledged()); + } + + clusterAdmin().prepareDeleteRepository(repoName).get(timeout); + } + + public static class TestPlugin extends Plugin implements RepositoryPlugin { + static final String ASSERTING_REPO_TYPE = "asserting"; + + @Override + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + ASSERTING_REPO_TYPE, + metadata -> new AssertingRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + } + + private static class AssertingRepository extends FsRepository { + AssertingRepository( + RepositoryMetadata metadata, + Environment environment, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, environment, 
namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + @Override + protected BlobStore createBlobStore() throws Exception { + return new AssertingBlobStore(super.createBlobStore()); + } + } + + private static class AssertingBlobStore implements BlobStore { + private final BlobStore delegateBlobStore; + + AssertingBlobStore(BlobStore delegateBlobStore) { + this.delegateBlobStore = delegateBlobStore; + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + return new AssertingBlobContainer(delegateBlobStore.blobContainer(path)); + } + + @Override + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) throws IOException { + delegateBlobStore.deleteBlobsIgnoringIfNotExists(purpose, blobNames); + } + + @Override + public void close() throws IOException { + delegateBlobStore.close(); + } + } + + private static class AssertingBlobContainer extends FilterBlobContainer { + + AssertingBlobContainer(BlobContainer delegate) { + super(delegate); + } + + @Override + protected BlobContainer wrapChild(BlobContainer child) { + return new AssertingBlobContainer(child); + } + + @Override + public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + assertPurposeConsistency(purpose, blobName); + super.writeBlob(purpose, blobName, bytes, failIfAlreadyExists); + } + + @Override + public void writeBlob( + OperationPurpose purpose, + String blobName, + InputStream inputStream, + long blobSize, + boolean failIfAlreadyExists + ) throws IOException { + assertPurposeConsistency(purpose, blobName); + super.writeBlob(purpose, blobName, inputStream, blobSize, failIfAlreadyExists); + } + + @Override + public void writeMetadataBlob( + OperationPurpose purpose, + String blobName, + boolean failIfAlreadyExists, + boolean atomic, + CheckedConsumer writer + ) throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, 
purpose); + assertPurposeConsistency(purpose, blobName); + super.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, atomic, writer); + } + + @Override + public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) + throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + assertPurposeConsistency(purpose, blobName); + super.writeBlobAtomic(purpose, blobName, bytes, failIfAlreadyExists); + } + + @Override + public boolean blobExists(OperationPurpose purpose, String blobName) throws IOException { + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + assertPurposeConsistency(purpose, blobName); + return super.blobExists(purpose, blobName); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + assertPurposeConsistency(purpose, blobName); + return super.readBlob(purpose, blobName); + } + + @Override + public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assertPurposeConsistency(purpose, blobName); + return super.readBlob(purpose, blobName, position, length); + } + + @Override + public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { + assertEquals(OperationPurpose.SNAPSHOT_METADATA, purpose); + return super.listBlobsByPrefix(purpose, blobNamePrefix); + } + } + + private static void assertPurposeConsistency(OperationPurpose purpose, String blobName) { + if (blobName.startsWith(BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX)) { + assertEquals(blobName, OperationPurpose.SNAPSHOT_DATA, purpose); + } else { + assertThat( + blobName, + anyOf( + startsWith(BlobStoreRepository.INDEX_FILE_PREFIX), + startsWith(BlobStoreRepository.METADATA_PREFIX), + startsWith(BlobStoreRepository.SNAPSHOT_PREFIX), + equalTo(BlobStoreRepository.INDEX_LATEST_BLOB), + // verification + equalTo("master.dat"), 
+ startsWith("data-") + ) + ); + assertEquals(blobName, OperationPurpose.SNAPSHOT_METADATA, purpose); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index c832f222ecc69..77c225f5d94cb 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.io.InputStream; @@ -116,6 +117,7 @@ void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStrea */ default void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert assertPurposeConsistency(purpose, blobName); writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @@ -261,4 +263,33 @@ default void getRegister(OperationPurpose purpose, String key, ActionListener + *
  • {@link OperationPurpose#SNAPSHOT_DATA} is not used for blobs that look like metadata blobs.
  • + *
  • {@link OperationPurpose#SNAPSHOT_METADATA} is not used for blobs that look like data blobs.
  • + * + */ + // This is fairly lenient because we use a wide variety of blob names and purposes in tests in order to get good coverage. See + // BlobStoreRepositoryOperationPurposeIT for some stricter checks which apply during genuine snapshot operations. + static boolean assertPurposeConsistency(OperationPurpose purpose, String blobName) { + switch (purpose) { + case SNAPSHOT_DATA -> { + // must not be used for blobs with names that look like metadata blobs + assert (blobName.startsWith(BlobStoreRepository.INDEX_FILE_PREFIX) + || blobName.startsWith(BlobStoreRepository.METADATA_PREFIX) + || blobName.startsWith(BlobStoreRepository.SNAPSHOT_PREFIX) + || blobName.equals(BlobStoreRepository.INDEX_LATEST_BLOB)) == false : blobName + " should not use purpose " + purpose; + } + case SNAPSHOT_METADATA -> { + // must not be used for blobs with names that look like data blobs + assert blobName.startsWith(BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX) == false + : blobName + " should not use purpose " + purpose; + } + case REPOSITORY_ANALYSIS, CLUSTER_STATE, INDICES, TRANSLOG -> { + // no specific requirements + } + } + return true; + } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java index 568f2968c9e61..5df17c1948870 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java @@ -15,7 +15,8 @@ * as well as other things that requires further differentiation for the same blob operation. 
*/ public enum OperationPurpose { - SNAPSHOT("Snapshot"), + SNAPSHOT_DATA("SnapshotData"), + SNAPSHOT_METADATA("SnapshotMetadata"), REPOSITORY_ANALYSIS("RepositoryAnalysis"), CLUSTER_STATE("ClusterState"), INDICES("Indices"), diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index 9f2971e24cbf3..e40ca70460b13 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -183,6 +183,7 @@ public boolean blobExists(OperationPurpose purpose, String blobName) { @Override public InputStream readBlob(OperationPurpose purpose, String name) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, name); final Path resolvedPath = path.resolve(name); try { return Files.newInputStream(resolvedPath); @@ -193,6 +194,7 @@ public InputStream readBlob(OperationPurpose purpose, String name) throws IOExce @Override public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final SeekableByteChannel channel = Files.newByteChannel(path.resolve(blobName)); if (position > 0L) { channel.position(position); @@ -210,6 +212,7 @@ public long readBlobPreferredLength() { @Override public void writeBlob(OperationPurpose purpose, String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final Path file = path.resolve(blobName); try { writeToPath(inputStream, file, blobSize); @@ -225,6 +228,7 @@ public void writeBlob(OperationPurpose purpose, String blobName, InputStream inp @Override public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean 
failIfAlreadyExists) throws IOException { + assert BlobContainer.assertPurposeConsistency(purpose, blobName); final Path file = path.resolve(blobName); try { writeToPath(bytes, file); @@ -246,6 +250,7 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; if (atomic) { final String tempBlob = tempBlobName(blobName); try { @@ -291,6 +296,7 @@ private void writeToPath( @Override public void writeBlobAtomic(OperationPurpose purpose, final String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assert purpose != OperationPurpose.SNAPSHOT_DATA && BlobContainer.assertPurposeConsistency(purpose, blobName) : purpose; final String tempBlob = tempBlobName(blobName); final Path tempBlobPath = path.resolve(tempBlob); try { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java index daf9a809dcf07..1424ef160657b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java @@ -50,7 +50,7 @@ public InputStream getInputStreamForSnapshotFile( inputStream = new SlicedInputStream(fileInfo.numberOfParts()) { @Override protected InputStream openSlice(int slice) throws IOException { - return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice)); } }; } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index cd2b8c73fe90b..c45a048480383 100644 --- 
a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -513,7 +513,7 @@ public void cloneShardSnapshot( final ShardGeneration existingShardGen; if (shardGeneration == null) { Tuple tuple = buildBlobStoreIndexShardSnapshots( - shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(), + shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(), shardContainer ); existingShardGen = new ShardGeneration(tuple.v2()); @@ -883,7 +883,7 @@ private void createSnapshotsDeletion( listener.onFailure(new RepositoryException(metadata.name(), "repository is readonly")); } else { threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.supply(listener, () -> { - final var originalRootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT); + final var originalRootBlobs = blobContainer().listBlobs(OperationPurpose.SNAPSHOT_METADATA); // One final best-effort check for other clusters concurrently writing to the repository: final var originalRepositoryData = safeRepositoryData(repositoryDataGeneration, originalRootBlobs); @@ -893,7 +893,7 @@ private void createSnapshotsDeletion( repositoryDataGeneration, SnapshotsService.minCompatibleVersion(minimumNodeVersion, originalRepositoryData, snapshotIds), originalRootBlobs, - blobStore().blobContainer(indicesPath()).children(OperationPurpose.SNAPSHOT), + blobStore().blobContainer(indicesPath()).children(OperationPurpose.SNAPSHOT_DATA), originalRepositoryData ); })); @@ -1243,7 +1243,7 @@ private class ShardSnapshotsDeletion extends AbstractRunnable { @Override protected void doRun() throws Exception { shardContainer = shardContainer(indexId, shardId); - originalShardBlobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT).keySet(); + originalShardBlobs = shardContainer.listBlobs(OperationPurpose.SNAPSHOT_DATA).keySet(); 
final BlobStoreIndexShardSnapshots blobStoreIndexShardSnapshots; final long newGen; if (useShardGenerations) { @@ -1380,7 +1380,7 @@ private void cleanupUnlinkedShardLevelBlobs( } snapshotExecutor.execute(ActionRunnable.wrap(listener, l -> { try { - deleteFromContainer(blobContainer(), filesToDelete); + deleteFromContainer(OperationPurpose.SNAPSHOT_DATA, blobContainer(), filesToDelete); l.onResponse(null); } catch (Exception e) { logger.warn(() -> format("%s Failed to delete some blobs during snapshot delete", snapshotIds), e); @@ -1425,7 +1425,7 @@ private void cleanupUnlinkedRootAndIndicesBlobs(RepositoryData newRepositoryData staleBlobDeleteRunner.enqueueTask(listeners.acquire(ref -> { try (ref) { logStaleRootLevelBlobs(newRepositoryData.getGenId() - 1, snapshotIds, staleRootBlobs); - deleteFromContainer(blobContainer(), staleRootBlobs.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), staleRootBlobs.iterator()); for (final var staleRootBlob : staleRootBlobs) { bytesDeleted.addAndGet(originalRootBlobs.get(staleRootBlob).length()); } @@ -1456,7 +1456,7 @@ private void cleanupUnlinkedRootAndIndicesBlobs(RepositoryData newRepositoryData staleBlobDeleteRunner.enqueueTask(listeners.acquire(ref -> { try (ref) { logger.debug("[{}] Found stale index [{}]. 
Cleaning it up", metadata.name(), indexId); - final var deleteResult = indexEntry.getValue().delete(OperationPurpose.SNAPSHOT); + final var deleteResult = indexEntry.getValue().delete(OperationPurpose.SNAPSHOT_DATA); blobsDeleted.addAndGet(deleteResult.blobsDeleted()); bytesDeleted.addAndGet(deleteResult.bytesDeleted()); logger.debug("[{}] Cleaned up stale index [{}]", metadata.name(), indexId); @@ -1757,7 +1757,7 @@ private void cleanupOldMetadata( threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { - deleteFromContainer(blobContainer(), toDelete.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), toDelete.iterator()); } @Override @@ -1854,7 +1854,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } } - private void deleteFromContainer(BlobContainer container, Iterator blobs) throws IOException { + private void deleteFromContainer(OperationPurpose purpose, BlobContainer container, Iterator blobs) throws IOException { final Iterator wrappedIterator; if (logger.isTraceEnabled()) { wrappedIterator = new Iterator<>() { @@ -1873,7 +1873,7 @@ public String next() { } else { wrappedIterator = blobs; } - container.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, wrappedIterator); + container.deleteBlobsIgnoringIfNotExists(purpose, wrappedIterator); } private BlobPath indicesPath() { @@ -2001,7 +2001,7 @@ public String startVerification() { String seed = UUIDs.randomBase64UUID(); byte[] testBytes = Strings.toUTF8Bytes(seed); BlobContainer testContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); - testContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT, "master.dat", new BytesArray(testBytes), true); + testContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT_METADATA, "master.dat", new BytesArray(testBytes), true); return seed; } } catch (Exception exp) { @@ -2014,7 +2014,7 @@ public void 
endVerification(String seed) { if (isReadOnly() == false) { try { final String testPrefix = testBlobPrefix(seed); - blobStore().blobContainer(basePath().add(testPrefix)).delete(OperationPurpose.SNAPSHOT); + blobStore().blobContainer(basePath().add(testPrefix)).delete(OperationPurpose.SNAPSHOT_METADATA); } catch (Exception exp) { throw new RepositoryVerificationException(metadata.name(), "cannot delete test data at " + basePath(), exp); } @@ -2434,7 +2434,7 @@ private RepositoryData getRepositoryData(long indexGen) { // EMPTY is safe here because RepositoryData#fromXContent calls namedObject try ( - InputStream blob = blobContainer().readBlob(OperationPurpose.SNAPSHOT, snapshotsIndexBlobName); + InputStream blob = blobContainer().readBlob(OperationPurpose.SNAPSHOT_METADATA, snapshotsIndexBlobName); XContentParser parser = XContentType.JSON.xContent() .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, blob) ) { @@ -2660,7 +2660,7 @@ public void onFailure(Exception e) { } final String indexBlob = INDEX_FILE_PREFIX + newGen; logger.debug("Repository [{}] writing new index generational blob [{}]", metadata.name(), indexBlob); - writeAtomic(blobContainer(), indexBlob, out -> { + writeAtomic(OperationPurpose.SNAPSHOT_METADATA, blobContainer(), indexBlob, out -> { try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(org.elasticsearch.core.Streams.noCloseStream(out))) { newRepositoryData.snapshotsToXContent(xContentBuilder, version); } @@ -2750,7 +2750,13 @@ private void maybeWriteIndexLatest(long newGen) { if (supportURLRepo) { logger.debug("Repository [{}] updating index.latest with generation [{}]", metadata.name(), newGen); try { - writeAtomic(blobContainer(), INDEX_LATEST_BLOB, out -> out.write(Numbers.longToBytes(newGen)), false); + writeAtomic( + OperationPurpose.SNAPSHOT_METADATA, + blobContainer(), + INDEX_LATEST_BLOB, + out -> out.write(Numbers.longToBytes(newGen)), + false + ); } catch (Exception e) { logger.warn( () -> 
format( @@ -2777,7 +2783,7 @@ private void maybeWriteIndexLatest(long newGen) { private boolean ensureSafeGenerationExists(long safeGeneration, Consumer onFailure) throws IOException { logger.debug("Ensure generation [{}] that is the basis for this write exists in [{}]", safeGeneration, metadata.name()); if (safeGeneration != RepositoryData.EMPTY_REPO_GEN - && blobContainer().blobExists(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX + safeGeneration) == false) { + && blobContainer().blobExists(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX + safeGeneration) == false) { Tuple previousWriterInfo = null; Exception readRepoDataEx = null; try { @@ -2907,7 +2913,7 @@ long latestIndexBlobId() throws IOException { // package private for testing long readSnapshotIndexLatestBlob() throws IOException { final BytesReference content = Streams.readFully( - Streams.limitStream(blobContainer().readBlob(OperationPurpose.SNAPSHOT, INDEX_LATEST_BLOB), Long.BYTES + 1) + Streams.limitStream(blobContainer().readBlob(OperationPurpose.SNAPSHOT_METADATA, INDEX_LATEST_BLOB), Long.BYTES + 1) ); if (content.length() != Long.BYTES) { throw new RepositoryException( @@ -2922,7 +2928,7 @@ long readSnapshotIndexLatestBlob() throws IOException { } private long listBlobsToGetLatestIndexId() throws IOException { - return latestGeneration(blobContainer().listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet()); + return latestGeneration(blobContainer().listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet()); } private long latestGeneration(Collection rootBlobs) { @@ -2944,13 +2950,14 @@ private long latestGeneration(Collection rootBlobs) { } private void writeAtomic( + OperationPurpose purpose, BlobContainer container, final String blobName, CheckedConsumer writer, boolean failIfAlreadyExists ) throws IOException { logger.trace(() -> format("[%s] Writing [%s] to %s atomically", metadata.name(), blobName, container.path())); - 
container.writeMetadataBlob(OperationPurpose.SNAPSHOT, blobName, failIfAlreadyExists, true, writer); + container.writeMetadataBlob(purpose, blobName, failIfAlreadyExists, true, writer); } @Override @@ -2976,7 +2983,7 @@ private void doSnapshotShard(SnapshotShardContext context) { if (generation == null) { snapshotStatus.ensureNotAborted(); try { - blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(); } catch (IOException e) { throw new IndexShardSnapshotFailedException(shardId, "failed to list blobs", e); } @@ -3168,7 +3175,7 @@ private void doSnapshotShard(SnapshotShardContext context) { } snapshotStatus.addProcessedFiles(finalFilesInShardMetadataCount, finalFilesInShardMetadataSize); try { - deleteFromContainer(shardContainer, blobsToDelete.iterator()); + deleteFromContainer(OperationPurpose.SNAPSHOT_METADATA, shardContainer, blobsToDelete.iterator()); } catch (IOException e) { logger.warn( () -> format("[%s][%s] failed to delete old index-N blobs during finalization", snapshotId, shardId), @@ -3223,7 +3230,7 @@ private void doSnapshotShard(SnapshotShardContext context) { }, e -> { try { shardContainer.deleteBlobsIgnoringIfNotExists( - OperationPurpose.SNAPSHOT, + OperationPurpose.SNAPSHOT_DATA, Iterators.flatMap(fileToCleanUp.get().iterator(), f -> Iterators.forRange(0, f.numberOfParts(), f::partName)) ); } catch (Exception innerException) { @@ -3388,7 +3395,7 @@ private void restoreFile(BlobStoreIndexShardSnapshot.FileInfo fileInfo, Store st @Override protected InputStream openSlice(int slice) throws IOException { ensureNotClosing(store); - return container.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(slice)); + return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice)); } })) { final byte[] buffer = new byte[Math.toIntExact(Math.min(bufferSize, fileInfo.length()))]; @@ -3527,7 
+3534,12 @@ public void verify(String seed, DiscoveryNode localNode) { } else { BlobContainer testBlobContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed))); try { - testBlobContainer.writeBlob(OperationPurpose.SNAPSHOT, "data-" + localNode.getId() + ".dat", new BytesArray(seed), true); + testBlobContainer.writeBlob( + OperationPurpose.SNAPSHOT_METADATA, + "data-" + localNode.getId() + ".dat", + new BytesArray(seed), + true + ); } catch (Exception exp) { throw new RepositoryVerificationException( metadata.name(), @@ -3535,7 +3547,7 @@ public void verify(String seed, DiscoveryNode localNode) { exp ); } - try (InputStream masterDat = testBlobContainer.readBlob(OperationPurpose.SNAPSHOT, "master.dat")) { + try (InputStream masterDat = testBlobContainer.readBlob(OperationPurpose.SNAPSHOT_METADATA, "master.dat")) { final String seedRead = Streams.readFully(masterDat).utf8ToString(); if (seedRead.equals(seed) == false) { throw new RepositoryVerificationException( @@ -3582,6 +3594,7 @@ private void writeShardIndexBlobAtomic( logger.trace(() -> format("[%s] Writing shard index [%s] to [%s]", metadata.name(), indexGeneration, shardContainer.path())); final String blobName = INDEX_SHARD_SNAPSHOTS_FORMAT.blobName(String.valueOf(indexGeneration)); writeAtomic( + OperationPurpose.SNAPSHOT_METADATA, shardContainer, blobName, out -> INDEX_SHARD_SNAPSHOTS_FORMAT.serialize(updatedSnapshots, blobName, compress, serializationParams, out), @@ -3617,7 +3630,7 @@ public BlobStoreIndexShardSnapshots getBlobStoreIndexShardSnapshots(IndexId inde Set blobs = Collections.emptySet(); if (shardGen == null) { - blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT, INDEX_FILE_PREFIX).keySet(); + blobs = shardContainer.listBlobsByPrefix(OperationPurpose.SNAPSHOT_METADATA, INDEX_FILE_PREFIX).keySet(); } return buildBlobStoreIndexShardSnapshots(blobs, shardContainer, shardGen).v1(); @@ -3719,7 +3732,7 @@ private void checkAborted() { final String partName = 
fileInfo.partName(i); logger.trace("[{}] Writing [{}] to [{}]", metadata.name(), partName, shardContainer.path()); final long startMS = threadPool.relativeTimeInMillis(); - shardContainer.writeBlob(OperationPurpose.SNAPSHOT, partName, inputStream, partBytes, false); + shardContainer.writeBlob(OperationPurpose.SNAPSHOT_DATA, partName, inputStream, partBytes, false); logger.trace( "[{}] Writing [{}] of size [{}b] to [{}] took [{}ms]", metadata.name(), diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index 54cb6fe7c45d3..ca3ff799436c2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -118,7 +118,7 @@ public ChecksumBlobStoreFormat( public T read(String repoName, BlobContainer blobContainer, String name, NamedXContentRegistry namedXContentRegistry) throws IOException { String blobName = blobName(name); - try (InputStream in = blobContainer.readBlob(OperationPurpose.SNAPSHOT, blobName)) { + try (InputStream in = blobContainer.readBlob(OperationPurpose.SNAPSHOT_METADATA, blobName)) { return deserialize(repoName, namedXContentRegistry, in); } } @@ -345,7 +345,7 @@ public void write(T obj, BlobContainer blobContainer, String name, boolean compr throws IOException { final String blobName = blobName(name); blobContainer.writeMetadataBlob( - OperationPurpose.SNAPSHOT, + OperationPurpose.SNAPSHOT_METADATA, blobName, false, false, diff --git a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java index 1f54046630cf8..67712af9ef57b 100644 --- a/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java +++ 
b/server/src/test/java/org/elasticsearch/common/blobstore/fs/FsBlobContainerTests.java @@ -46,6 +46,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -228,14 +229,19 @@ private static void checkAtomicWrite() throws IOException { BlobPath.EMPTY, path ); - container.writeBlobAtomic(randomPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true); + container.writeBlobAtomic( + randomNonDataPurpose(), + blobName, + new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), + true + ); final var blobData = new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))); - container.writeBlobAtomic(randomPurpose(), blobName, blobData, false); + container.writeBlobAtomic(randomNonDataPurpose(), blobName, blobData, false); assertEquals(blobData, Streams.readFully(container.readBlob(randomPurpose(), blobName))); expectThrows( FileAlreadyExistsException.class, () -> container.writeBlobAtomic( - randomPurpose(), + randomNonDataPurpose(), blobName, new BytesArray(randomByteArrayOfLength(randomIntBetween(1, 512))), true diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index ef625706ffffe..adfc333e9dc7e 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.UUIDs; +import 
org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -67,7 +68,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData; -import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -204,7 +204,7 @@ public void testCorruptIndexLatestFile() throws Exception { for (int i = 0; i < 16; i++) { repository.blobContainer() - .writeBlob(randomPurpose(), BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); + .writeBlob(OperationPurpose.SNAPSHOT_METADATA, BlobStoreRepository.INDEX_LATEST_BLOB, new BytesArray(buffer, 0, i), false); if (i == 8) { assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(generation)); } else { diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 5b59040bbb04d..19f0d1e2e88a0 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -273,7 +273,11 @@ public void verifyReposThenStopServices() { (BlobStoreRepository) testClusterNodes.randomMasterNodeSafe().repositoriesService.repository("repo") ); deterministicTaskQueue.runAllRunnableTasks(); - assertNull(future.result()); + assertTrue(future.isDone()); + final var result = future.result(); + if (result != null) { + fail(result); + } } finally { testClusterNodes.nodes.values().forEach(TestClusterNodes.TestClusterNode::stop); } diff --git 
a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java index 15f33131fa114..3d4dea430a9b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/AbstractThirdPartyRepositoryTestCase.java @@ -36,6 +36,7 @@ import java.util.Set; import java.util.concurrent.Executor; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.contains; @@ -275,7 +276,7 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex .writeBlob(randomPurpose(), "bar", new ByteArrayInputStream(new byte[3]), 3, false); for (String prefix : Arrays.asList("snap-", "meta-")) { blobStore.blobContainer(repo.basePath()) - .writeBlob(randomPurpose(), prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); + .writeBlob(randomNonDataPurpose(), prefix + "foo.dat", new ByteArrayInputStream(new byte[3]), 3, false); } })); future.get(); @@ -285,8 +286,8 @@ private static void createDanglingIndex(final BlobStoreRepository repo, final Ex final BlobStore blobStore = repo.blobStore(); return blobStore.blobContainer(repo.basePath().add("indices")).children(randomPurpose()).containsKey("foo") && blobStore.blobContainer(repo.basePath().add("indices").add("foo")).blobExists(randomPurpose(), "bar") - && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "meta-foo.dat") - && blobStore.blobContainer(repo.basePath()).blobExists(randomPurpose(), "snap-foo.dat"); + && blobStore.blobContainer(repo.basePath()).blobExists(randomNonDataPurpose(), 
"meta-foo.dat") + && blobStore.blobContainer(repo.basePath()).blobExists(randomNonDataPurpose(), "snap-foo.dat"); })); assertTrue(corruptionFuture.get()); } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 383c2b3c2d13b..79e4a8da713c5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -67,6 +67,7 @@ import static org.apache.lucene.tests.util.LuceneTestCase.random; import static org.elasticsearch.test.ESTestCase.randomFrom; import static org.elasticsearch.test.ESTestCase.randomIntBetween; +import static org.elasticsearch.test.ESTestCase.randomValueOtherThan; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasKey; @@ -105,7 +106,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore try { final BlobContainer blobContainer = repository.blobContainer(); final long latestGen; - try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(randomPurpose(), "index.latest"))) { + try (DataInputStream inputStream = new DataInputStream(blobContainer.readBlob(randomNonDataPurpose(), "index.latest"))) { latestGen = inputStream.readLong(); } catch (NoSuchFileException e) { throw new AssertionError("Could not find index.latest blob for repo [" + repository + "]"); @@ -113,7 +114,7 @@ public static PlainActionFuture assertConsistencyAsync(BlobStore assertIndexGenerations(blobContainer, latestGen); final RepositoryData repositoryData; try ( - InputStream blob = blobContainer.readBlob(randomPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX + latestGen); + InputStream blob = blobContainer.readBlob(randomNonDataPurpose(), BlobStoreRepository.INDEX_FILE_PREFIX 
+ latestGen); XContentParser parser = XContentType.JSON.xContent() .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), blob) ) { @@ -462,4 +463,8 @@ private static ClusterService mockClusterService(ClusterState initialState) { public static OperationPurpose randomPurpose() { return randomFrom(OperationPurpose.values()); } + + public static OperationPurpose randomNonDataPurpose() { + return randomValueOtherThan(OperationPurpose.SNAPSHOT_DATA, BlobStoreTestUtil::randomPurpose); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 578a7898bcd1e..a2499c06d6ccc 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -62,6 +63,7 @@ import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.READONLY_SETTING_KEY; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_INDEX_NAME_FORMAT; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_NAME_FORMAT; +import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomNonDataPurpose; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -228,7 +230,7 @@ public static void writeBlob( if (randomBoolean()) { container.writeBlob(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); } else { - container.writeBlobAtomic(randomPurpose(), blobName, bytesArray, failIfAlreadyExists); + container.writeBlobAtomic(randomNonDataPurpose(), blobName, bytesArray, failIfAlreadyExists); } } @@ -556,7 +558,7 @@ public void testDanglingShardLevelBlobCleanup() throws Exception { // Create an extra dangling blob as if from an earlier snapshot that failed to clean up shardContainer.writeBlob( - randomPurpose(), + OperationPurpose.SNAPSHOT_DATA, BlobStoreRepository.UPLOADED_DATA_BLOB_PREFIX + UUIDs.randomBase64UUID(random()), BytesArray.EMPTY, true diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java index aab3e83a4f496..ea85a91677c46 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java @@ -341,7 +341,7 @@ public String toString() { private InputStream openBlobStream(int part, long pos, long length) throws IOException { assert MetadataCachingIndexInput.assertCurrentThreadMayAccessBlobStore(); stats.addBlobStoreBytesRequested(length); - return blobContainer.readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(part), pos, length); + return blobContainer.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(part), pos, length); } private static class StreamForSequentialReads 
implements Closeable { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 2b61dc18e266c..e9f4ab11c9b7c 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -528,7 +528,7 @@ protected InputStream openInputStreamFromBlobStore(final long position, final lo assert position + readLength <= fileInfo.length() : "cannot read [" + position + "-" + (position + readLength) + "] from [" + fileInfo + "]"; stats.addBlobStoreBytesRequested(readLength); - return directory.blobContainer().readBlob(OperationPurpose.SNAPSHOT, fileInfo.name(), position, readLength); + return directory.blobContainer().readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.name(), position, readLength); } return openInputStreamMultipleParts(position, readLength); } @@ -558,7 +558,7 @@ protected InputStream openSlice(int slice) throws IOException { ? getRelativePositionInPart(position + readLength - 1) + 1 : fileInfo.partBytes(currentPart); return directory.blobContainer() - .readBlob(OperationPurpose.SNAPSHOT, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); + .readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); } }; } From d3fefde0a33031a4a35c33df8f5f9f3beb04cd8c Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Mon, 4 Dec 2023 13:58:23 +0100 Subject: [PATCH 172/263] ESQL: add unit tests for conversion functions. Extend `TEXT` type support for them (#102746) This adds the missing unit tests for the conversion functions. 
It also extends the type support by adding the `TEXT` type to those functions that support `KEYWORD` already (which also simplifies the testing, actually). Some functions did have it, some didn't; they now all do. The change also fixes two defects resulting from better testing coverage: `ToInteger` and `ToUnsignedLong` had some missing necessary exceptions declarations in the decorators for the evaluators. It also updates `ToInteger`'s `fromDouble()` conversion to use a newly added utility, so that the failed conversions contain the right message (`out of [integer] range`, instead of the confusing `out of [long] range`). Related: #102488, #102552. --- .../esql/functions/signature/to_boolean.svg | 1 + .../esql/functions/signature/to_datetime.svg | 1 + .../esql/functions/signature/to_degrees.svg | 1 + .../esql/functions/signature/to_double.svg | 1 + .../esql/functions/signature/to_integer.svg | 1 + .../esql/functions/signature/to_ip.svg | 2 +- .../esql/functions/signature/to_long.svg | 1 + .../esql/functions/signature/to_radians.svg | 1 + .../functions/signature/to_unsigned_long.svg | 1 + .../esql/functions/types/mv_count.asciidoc | 2 + .../esql/functions/types/to_boolean.asciidoc | 11 + .../esql/functions/types/to_datetime.asciidoc | 11 + .../esql/functions/types/to_degrees.asciidoc | 8 + .../esql/functions/types/to_double.asciidoc | 12 + .../esql/functions/types/to_integer.asciidoc | 12 + .../esql/functions/types/to_ip.asciidoc | 3 +- .../esql/functions/types/to_long.asciidoc | 14 + .../esql/functions/types/to_radians.asciidoc | 8 + .../esql/functions/types/to_string.asciidoc | 2 + .../functions/types/to_unsigned_long.asciidoc | 12 + .../xpack/esql/CsvTestUtils.java | 3 +- .../src/main/resources/ints.csv-spec | 28 +- .../src/main/resources/show.csv-spec | 183 ++++++------ .../convert/ToIntegerFromDoubleEvaluator.java | 7 +- .../convert/ToIntegerFromLongEvaluator.java | 7 +- .../convert/ToIntegerFromStringEvaluator.java | 7 +- 
.../ToIntegerFromUnsignedLongEvaluator.java | 7 +- .../ToUnsignedLongFromIntEvaluator.java | 31 +- .../ToUnsignedLongFromLongEvaluator.java | 31 +- .../ToUnsignedLongFromStringEvaluator.java | 7 +- .../convert/AbstractConvertFunction.java | 43 ++- .../function/scalar/convert/ToBoolean.java | 10 +- .../function/scalar/convert/ToDatetime.java | 10 +- .../function/scalar/convert/ToDegrees.java | 5 +- .../function/scalar/convert/ToDouble.java | 10 +- .../function/scalar/convert/ToIP.java | 9 +- .../function/scalar/convert/ToInteger.java | 32 +- .../function/scalar/convert/ToLong.java | 13 +- .../function/scalar/convert/ToRadians.java | 5 +- .../function/scalar/convert/ToString.java | 2 + .../scalar/convert/ToUnsignedLong.java | 16 +- .../function/scalar/convert/ToVersion.java | 2 + .../xpack/esql/analysis/AnalyzerTests.java | 3 +- .../function/AbstractFunctionTestCase.java | 63 +++- .../expression/function/TestCaseSupplier.java | 158 ++++++---- .../scalar/convert/ToBooleanTests.java | 90 ++++++ .../scalar/convert/ToDatetimeTests.java | 152 ++++++++++ .../scalar/convert/ToDegreesTests.java | 80 +++++ .../scalar/convert/ToDoubleTests.java | 122 ++++++++ .../function/scalar/convert/ToIPTests.java | 48 ++- .../scalar/convert/ToIntegerTests.java | 277 ++++++++++++++++++ .../function/scalar/convert/ToLongTests.java | 217 ++++++++++++++ .../scalar/convert/ToRadiansTests.java | 80 +++++ .../scalar/convert/ToUnsignedLongTests.java | 258 ++++++++++++++++ .../scalar/convert/ToVersionTests.java | 33 +-- .../xpack/ql/type/DataTypeConverter.java | 8 + 56 files changed, 1892 insertions(+), 270 deletions(-) create mode 100644 docs/reference/esql/functions/signature/to_boolean.svg create mode 100644 docs/reference/esql/functions/signature/to_datetime.svg create mode 100644 docs/reference/esql/functions/signature/to_degrees.svg create mode 100644 docs/reference/esql/functions/signature/to_double.svg create mode 100644 docs/reference/esql/functions/signature/to_integer.svg create mode 
100644 docs/reference/esql/functions/signature/to_long.svg create mode 100644 docs/reference/esql/functions/signature/to_radians.svg create mode 100644 docs/reference/esql/functions/signature/to_unsigned_long.svg create mode 100644 docs/reference/esql/functions/types/to_boolean.asciidoc create mode 100644 docs/reference/esql/functions/types/to_datetime.asciidoc create mode 100644 docs/reference/esql/functions/types/to_degrees.asciidoc create mode 100644 docs/reference/esql/functions/types/to_double.asciidoc create mode 100644 docs/reference/esql/functions/types/to_integer.asciidoc create mode 100644 docs/reference/esql/functions/types/to_long.asciidoc create mode 100644 docs/reference/esql/functions/types/to_radians.asciidoc create mode 100644 docs/reference/esql/functions/types/to_unsigned_long.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java diff --git a/docs/reference/esql/functions/signature/to_boolean.svg 
b/docs/reference/esql/functions/signature/to_boolean.svg new file mode 100644 index 0000000000000..43c2aac2bca53 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_boolean.svg @@ -0,0 +1 @@ +TO_BOOLEAN(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_datetime.svg b/docs/reference/esql/functions/signature/to_datetime.svg new file mode 100644 index 0000000000000..eb9e74248471a --- /dev/null +++ b/docs/reference/esql/functions/signature/to_datetime.svg @@ -0,0 +1 @@ +TO_DATETIME(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_degrees.svg b/docs/reference/esql/functions/signature/to_degrees.svg new file mode 100644 index 0000000000000..01fe0a4770156 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_degrees.svg @@ -0,0 +1 @@ +TO_DEGREES(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_double.svg b/docs/reference/esql/functions/signature/to_double.svg new file mode 100644 index 0000000000000..e785e30ce5f81 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_double.svg @@ -0,0 +1 @@ +TO_DOUBLE(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_integer.svg b/docs/reference/esql/functions/signature/to_integer.svg new file mode 100644 index 0000000000000..beb2e94039e53 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_integer.svg @@ -0,0 +1 @@ +TO_INTEGER(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_ip.svg b/docs/reference/esql/functions/signature/to_ip.svg index c049964b254f3..c1669c9376c8b 100644 --- a/docs/reference/esql/functions/signature/to_ip.svg +++ b/docs/reference/esql/functions/signature/to_ip.svg @@ -1 +1 @@ -TO_IP(arg1) \ No newline at end of file +TO_IP(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_long.svg b/docs/reference/esql/functions/signature/to_long.svg new file mode 100644 
index 0000000000000..464d4a001cb35 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_long.svg @@ -0,0 +1 @@ +TO_LONG(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_radians.svg b/docs/reference/esql/functions/signature/to_radians.svg new file mode 100644 index 0000000000000..712431fb32497 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_radians.svg @@ -0,0 +1 @@ +TO_RADIANS(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/to_unsigned_long.svg b/docs/reference/esql/functions/signature/to_unsigned_long.svg new file mode 100644 index 0000000000000..da07b3a4c7349 --- /dev/null +++ b/docs/reference/esql/functions/signature/to_unsigned_long.svg @@ -0,0 +1 @@ +TO_UNSIGNED_LONG(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 21794bcb1b959..440e66d11096e 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -2,8 +2,10 @@ |=== v | result boolean | integer +cartesian_point | integer datetime | integer double | integer +geo_point | integer integer | integer ip | integer keyword | integer diff --git a/docs/reference/esql/functions/types/to_boolean.asciidoc b/docs/reference/esql/functions/types/to_boolean.asciidoc new file mode 100644 index 0000000000000..7f543963eb090 --- /dev/null +++ b/docs/reference/esql/functions/types/to_boolean.asciidoc @@ -0,0 +1,11 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | boolean +double | boolean +integer | boolean +keyword | boolean +long | boolean +text | boolean +unsigned_long | boolean +|=== diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc new file mode 100644 index 0000000000000..bbd755f81f4da --- /dev/null +++ 
b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -0,0 +1,11 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +datetime | datetime +double | datetime +integer | datetime +keyword | datetime +long | datetime +text | datetime +unsigned_long | datetime +|=== diff --git a/docs/reference/esql/functions/types/to_degrees.asciidoc b/docs/reference/esql/functions/types/to_degrees.asciidoc new file mode 100644 index 0000000000000..7cb7ca46022c2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_degrees.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_double.asciidoc b/docs/reference/esql/functions/types/to_double.asciidoc new file mode 100644 index 0000000000000..38e8482b77544 --- /dev/null +++ b/docs/reference/esql/functions/types/to_double.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | double +datetime | double +double | double +integer | double +keyword | double +long | double +text | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_integer.asciidoc b/docs/reference/esql/functions/types/to_integer.asciidoc new file mode 100644 index 0000000000000..bcea15b9ec80b --- /dev/null +++ b/docs/reference/esql/functions/types/to_integer.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | integer +datetime | integer +double | integer +integer | integer +keyword | integer +long | integer +text | integer +unsigned_long | integer +|=== diff --git a/docs/reference/esql/functions/types/to_ip.asciidoc b/docs/reference/esql/functions/types/to_ip.asciidoc index a21bbf14d87ca..6d7f9338a9aeb 100644 --- a/docs/reference/esql/functions/types/to_ip.asciidoc +++ b/docs/reference/esql/functions/types/to_ip.asciidoc @@ -1,6 +1,7 
@@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result ip | ip keyword | ip +text | ip |=== diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc new file mode 100644 index 0000000000000..5c063739fc5b1 --- /dev/null +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -0,0 +1,14 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | long +cartesian_point | long +datetime | long +double | long +geo_point | long +integer | long +keyword | long +long | long +text | long +unsigned_long | long +|=== diff --git a/docs/reference/esql/functions/types/to_radians.asciidoc b/docs/reference/esql/functions/types/to_radians.asciidoc new file mode 100644 index 0000000000000..7cb7ca46022c2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_radians.asciidoc @@ -0,0 +1,8 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +double | double +integer | double +long | double +unsigned_long | double +|=== diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index b8fcd4477aa70..4de4af735b07f 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -2,8 +2,10 @@ |=== v | result boolean | keyword +cartesian_point | keyword datetime | keyword double | keyword +geo_point | keyword integer | keyword ip | keyword keyword | keyword diff --git a/docs/reference/esql/functions/types/to_unsigned_long.asciidoc b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc new file mode 100644 index 0000000000000..76d9cf44f4dd2 --- /dev/null +++ b/docs/reference/esql/functions/types/to_unsigned_long.asciidoc @@ -0,0 +1,12 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +v | result +boolean | unsigned_long +datetime | unsigned_long +double | unsigned_long +integer | unsigned_long 
+keyword | unsigned_long +long | unsigned_long +text | unsigned_long +unsigned_long | unsigned_long +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index 060a137b69b7c..ebe27225becb1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -327,8 +327,7 @@ public static ExpectedResults loadCsvSpecValues(String csv) { for (int i = 0; i < row.size(); i++) { String value = row.get(i); if (value == null || value.trim().equalsIgnoreCase(NULL_VALUE)) { - value = null; - rowValues.add(columnTypes.get(i).convert(value)); + rowValues.add(null); continue; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 0f6fc42860750..3e28c8bc2cb9b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -197,8 +197,7 @@ long:long |int:integer convertULToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warning:Line 1:57: evaluation of [to_int(ul)] failed, treating result as null. Only first 20 failures recorded. -// UL conversion to int dips into long; not the most efficient, but it's how SQL does it too. 
-warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [long] range +warning:Line 1:57: org.elasticsearch.xpack.ql.InvalidArgumentException: [9223372036854775808] out of [integer] range ul:ul |int:integer [2147483647, 9223372036854775808]|2147483647 @@ -219,20 +218,29 @@ tf:boolean |t2i:integer |f2i:integer |tf2i:integer ; convertStringToInt -row int_str = "2147483647", int_dbl_str = "2147483647.2" | eval is2i = to_integer(int_str), ids2i = to_integer(int_dbl_str), overflow = to_integer("2147483648"), no_number = to_integer("foo"); -warning:Line 1:137: evaluation of [to_integer(\"2147483648\")] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:137: java.lang.NumberFormatException: For input string: \"2147483648\" -warning:Line 1:175: evaluation of [to_integer(\"foo\")] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:175: java.lang.NumberFormatException: For input string: \"foo\" +row int_str = "2147483647", int_dbl_str = "2147483646.2" | eval is2i = to_integer(int_str), ids2i = to_integer(int_dbl_str); -int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer |overflow:integer |no_number:integer -2147483647 |2147483647.2 |2147483647 |2147483647 |null |null +int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer +2147483647 |2147483646.2 |2147483647 |2147483646 +; + +convertStringToIntFail#[skip:-8.11.99, reason:double rounding in conversion updated in 8.12] +row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); +warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:79: java.lang.NumberFormatException: For input string: \"2147483647.2\" +warning:Line 1:102: evaluation of [to_integer(str2)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:102: java.lang.NumberFormatException: For input string: \"2147483648\" +warning:Line 1:126: evaluation of [to_integer(non)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:126: java.lang.NumberFormatException: For input string: \"no number\" + +str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer |noi:integer +2147483647.2 |2147483648 |no number |null |null |null ; convertDoubleToInt#[skip:-8.11.99, reason:ql exceptions were updated in 8.12] row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warning:Line 1:54: evaluation of [to_integer(1e19)] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [long] range +warning:Line 1:54: org.elasticsearch.xpack.ql.InvalidArgumentException: [1.0E19] out of [integer] range d:double |d2i:integer |overflow:integer 123.4 |123 |null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index b23e4d87fe52f..ffad468790998 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -5,6 +5,7 @@ v:long 1 ; +# TODO: switch this test to ``&format=csv&delimiter=|` output showFunctions#[skip:-8.11.99] show functions; @@ -71,27 +72,27 @@ sum |? sum(arg1:?) tan |"double tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false tanh |"double tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false tau |? tau() | null | null | null |? | "" | null | false -to_bool |? to_bool(arg1:?) |arg1 |? | "" |? | "" | false | false -to_boolean |? to_boolean(arg1:?) |arg1 |? | "" |? 
| "" | false | false +to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false +to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false to_cartesianpoint |? to_cartesianpoint(arg1:?) |arg1 |? | "" |? | "" | false | false -to_datetime |? to_datetime(arg1:?) |arg1 |? | "" |? | "" | false | false -to_dbl |? to_dbl(arg1:?) |arg1 |? | "" |? | "" | false | false -to_degrees |? to_degrees(arg1:?) |arg1 |? | "" |? | "" | false | false -to_double |? to_double(arg1:?) |arg1 |? | "" |? | "" | false | false -to_dt |? to_dt(arg1:?) |arg1 |? | "" |? | "" | false | false +to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false +to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false +to_degrees |"double to_degrees(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false +to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false +to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false to_geopoint |? to_geopoint(arg1:?) |arg1 |? | "" |? | "" | false | false -to_int |? to_int(arg1:?) |arg1 |? | "" |? | "" | false | false -to_integer |? to_integer(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ip |? to_ip(arg1:?) |arg1 |? | "" |? | "" | false | false -to_long |? to_long(arg1:?) |arg1 |? | "" |? | "" | false | false -to_radians |? 
to_radians(arg1:?) |arg1 |? | "" |? | "" | false | false -to_str |"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false -to_string |"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)"|v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | "" |? | "" | false | false -to_ul |? to_ul(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ulong |? to_ulong(arg1:?) |arg1 |? | "" |? | "" | false | false -to_unsigned_long |? to_unsigned_long(arg1:?) |arg1 |? | "" |? | "" | false | false -to_ver |"? to_ver(v:keyword|text|version)" |v |"keyword|text|version"| "" |? | "" | false | false -to_version |"? to_version(v:keyword|text|version)" |v |"keyword|text|version"| "" |? | "" | false | false +to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false +to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false +to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | |false |false +to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point" | |long | |false |false +to_radians |"double to_radians(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false +to_str |"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v 
|"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false +to_string |"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" |v |"unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point" | |keyword | |false |false +to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | |false |false +to_ver |"version to_ver(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false +to_version |"version to_version(v:keyword|text|version)" |v |"keyword|text|version" | |version | |false |false trim |"keyword|text trim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading and trailing whitespaces from a string.| false | false ; @@ -99,90 +100,90 @@ trim |"keyword|text trim(str:keyword|text)" showFunctionsSynopsis#[skip:-8.11.99] show functions | keep synopsis; -synopsis:keyword -"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" -"double acos(n:integer|long|double|unsigned_long)" -"double asin(n:integer|long|double|unsigned_long)" -"double atan(n:integer|long|double|unsigned_long)" -"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" +synopsis:keyword +"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" +"double acos(n:integer|long|double|unsigned_long)" 
+"double asin(n:integer|long|double|unsigned_long)" +"double atan(n:integer|long|double|unsigned_long)" +"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" "double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" -? avg(arg1:?) -? case(arg1:?, arg2...:?) -"? ceil(n:integer|long|double|unsigned_long)" -? cidr_match(arg1:?, arg2...:?) -? coalesce(arg1:?, arg2...:?) -? concat(arg1:?, arg2...:?) -"double cos(n:integer|long|double|unsigned_long)" -"double cosh(n:integer|long|double|unsigned_long)" -? count(arg1:?) -? count_distinct(arg1:?, arg2:?) -? date_extract(arg1:?, arg2:?) -? date_format(arg1:?, arg2:?) +? avg(arg1:?) +? case(arg1:?, arg2...:?) +"? ceil(n:integer|long|double|unsigned_long)" +? cidr_match(arg1:?, arg2...:?) +? coalesce(arg1:?, arg2...:?) +? concat(arg1:?, arg2...:?) +"double cos(n:integer|long|double|unsigned_long)" +"double cosh(n:integer|long|double|unsigned_long)" +? count(arg1:?) +? count_distinct(arg1:?, arg2:?) +? date_extract(arg1:?, arg2:?) +? date_format(arg1:?, arg2:?) "date date_parse(?datePattern:keyword, dateString:keyword|text)" -? date_trunc(arg1:?, arg2:?) -? e() -? ends_with(arg1:?, arg2:?) -"? floor(n:integer|long|double|unsigned_long)" -"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -? is_finite(arg1:?) -? is_infinite(arg1:?) -? is_nan(arg1:?) -"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" -"? left(string:keyword, length:integer)" -? length(arg1:?) -"? log10(n:integer|long|double|unsigned_long)" +? date_trunc(arg1:?, arg2:?) +? e() +? ends_with(arg1:?, arg2:?) +"? floor(n:integer|long|double|unsigned_long)" +"? greatest(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +? 
is_finite(arg1:?) +? is_infinite(arg1:?) +? is_nan(arg1:?) +"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" +? left(string:keyword, length:integer) +? length(arg1:?) +"? log10(n:integer|long|double|unsigned_long)" "keyword|text ltrim(str:keyword|text)" -? max(arg1:?) -? median(arg1:?) -? median_absolute_deviation(arg1:?) -? min(arg1:?) -? mv_avg(arg1:?) +? max(arg1:?) +? median(arg1:?) +? median_absolute_deviation(arg1:?) +? min(arg1:?) +? mv_avg(arg1:?) "keyword mv_concat(v:text|keyword, delim:text|keyword)" "integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" "? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" "? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_median(arg1:?) +? mv_median(arg1:?) "? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" -? mv_sum(arg1:?) -? now() -? percentile(arg1:?, arg2:?) -? pi() -"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" -"? replace(arg1:?, arg2:?, arg3:?)" -"? right(string:keyword, length:integer)" -? round(arg1:?, arg2:?) +? mv_sum(arg1:?) +? now() +? percentile(arg1:?, arg2:?) +? pi() +"? pow(base:integer|unsigned_long|long|double, exponent:integer|unsigned_long|long|double)" +? replace(arg1:?, arg2:?, arg3:?) +? right(string:keyword, length:integer) +? round(arg1:?, arg2:?) "keyword|text rtrim(str:keyword|text)" -"double sin(n:integer|long|double|unsigned_long)" +"double sin(n:integer|long|double|unsigned_long)" "double sinh(n:integer|long|double|unsigned_long)" -? split(arg1:?, arg2:?) -"? sqrt(n:integer|long|double|unsigned_long)" -? starts_with(arg1:?, arg2:?) -? substring(arg1:?, arg2:?, arg3:?) -? sum(arg1:?) -"double tan(n:integer|long|double|unsigned_long)" -"double tanh(n:integer|long|double|unsigned_long)" -? tau() -? 
to_bool(arg1:?) -? to_boolean(arg1:?) +? split(arg1:?, arg2:?) +"? sqrt(n:integer|long|double|unsigned_long)" +? starts_with(arg1:?, arg2:?) +? substring(arg1:?, arg2:?, arg3:?) +? sum(arg1:?) +"double tan(n:integer|long|double|unsigned_long)" +"double tanh(n:integer|long|double|unsigned_long)" +? tau() +"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" +"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" ? to_cartesianpoint(arg1:?) -? to_datetime(arg1:?) -? to_dbl(arg1:?) -? to_degrees(arg1:?) -? to_double(arg1:?) -? to_dt(arg1:?) +"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" +"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"double to_degrees(v:double|long|unsigned_long|integer)" +"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" ? to_geopoint(arg1:?) -? to_int(arg1:?) -? to_integer(arg1:?) -? to_ip(arg1:?) -? to_long(arg1:?) -? to_radians(arg1:?) -"? to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -"? to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" -? to_ul(arg1:?) -? to_ulong(arg1:?) -? to_unsigned_long(arg1:?) -"? to_ver(v:keyword|text|version)" -"? 
to_version(v:keyword|text|version)" +"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"ip to_ip(v:ip|keyword|text)" +"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer|geo_point|cartesian_point)" +"double to_radians(v:double|long|unsigned_long|integer)" +"keyword to_str(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"keyword to_string(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long|geo_point|cartesian_point)" +"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" +"version to_ver(v:keyword|text|version)" +"version to_version(v:keyword|text|version)" "keyword|text trim(str:keyword|text)" ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java index b7ff410d07c15..329269bafd9ba 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public 
Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java index 742b057c06799..f9b3cb60dad2c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return 
driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java index bff4d46b09dff..600fa293394f9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +41,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } 
catch (InvalidArgumentException | NumberFormatException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +50,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); builder.appendNull(); } @@ -84,7 +85,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java index ccd1edc4aa6c2..34128e44f1500 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -39,7 +38,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantIntBlockWith(evalValue(vector, 0), positionCount); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); return 
driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -48,7 +47,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendInt(evalValue(vector, p)); - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); builder.appendNull(); } @@ -82,7 +81,7 @@ public Block evalBlock(Block b) { } builder.appendInt(value); valuesAppended = true; - } catch (InvalidArgumentException | QlIllegalArgumentException e) { + } catch (InvalidArgumentException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java index d3ccf82f2cb05..703f0729654a8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -35,11 +36,21 @@ public Block evalVector(Vector v) { IntVector vector = (IntVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (InvalidArgumentException e) { + registerException(e); + return 
driverContext.blockFactory().newConstantNullBlock(positionCount); + } } try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p)); + try { + builder.appendLong(evalValue(vector, p)); + } catch (InvalidArgumentException e) { + registerException(e); + builder.appendNull(); + } } return builder.build(); } @@ -62,13 +73,17 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i); - if (positionOpened == false && valueCount > 1) { - builder.beginPositionEntry(); - positionOpened = true; + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (InvalidArgumentException e) { + registerException(e); } - builder.appendLong(value); - valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java index 2f01aef20edde..b43b961f5d34a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; 
import org.elasticsearch.xpack.ql.tree.Source; /** @@ -34,11 +35,21 @@ public Block evalVector(Vector v) { LongVector vector = (LongVector) v; int positionCount = v.getPositionCount(); if (vector.isConstant()) { - return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + try { + return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0), positionCount); + } catch (InvalidArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } } try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { for (int p = 0; p < positionCount; p++) { - builder.appendLong(evalValue(vector, p)); + try { + builder.appendLong(evalValue(vector, p)); + } catch (InvalidArgumentException e) { + registerException(e); + builder.appendNull(); + } } return builder.build(); } @@ -61,13 +72,17 @@ public Block evalBlock(Block b) { boolean positionOpened = false; boolean valuesAppended = false; for (int i = start; i < end; i++) { - long value = evalValue(block, i); - if (positionOpened == false && valueCount > 1) { - builder.beginPositionEntry(); - positionOpened = true; + try { + long value = evalValue(block, i); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendLong(value); + valuesAppended = true; + } catch (InvalidArgumentException e) { + registerException(e); } - builder.appendLong(value); - valuesAppended = true; } if (valuesAppended == false) { builder.appendNull(); diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java index 4552154560421..5b46fe2bfc9bf 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.tree.Source; /** @@ -40,7 +41,7 @@ public Block evalVector(Vector v) { if (vector.isConstant()) { try { return driverContext.blockFactory().newConstantLongBlockWith(evalValue(vector, 0, scratchPad), positionCount); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); return driverContext.blockFactory().newConstantNullBlock(positionCount); } @@ -49,7 +50,7 @@ public Block evalVector(Vector v) { for (int p = 0; p < positionCount; p++) { try { builder.appendLong(evalValue(vector, p, scratchPad)); - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); builder.appendNull(); } @@ -84,7 +85,7 @@ public Block evalBlock(Block b) { } builder.appendLong(value); valuesAppended = true; - } catch (NumberFormatException e) { + } catch (InvalidArgumentException | NumberFormatException e) { registerException(e); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 0da3717f758bf..1772916ba801c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; +import joptsimple.internal.Strings; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.elasticsearch.compute.data.Block; @@ -20,12 +22,18 @@ import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.function.Function; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; @@ -35,6 +43,15 @@ */ public abstract class AbstractConvertFunction extends UnaryScalarFunction implements EvaluatorMapper { + // the numeric types convert functions need to handle; the other numeric types are converted upstream to one of these + private static final List NUMERIC_TYPES = List.of( + DataTypes.INTEGER, + DataTypes.LONG, + DataTypes.UNSIGNED_LONG, + DataTypes.DOUBLE + ); + public static final List STRING_TYPES = DataTypes.types().stream().filter(EsqlDataTypes::isString).toList(); + protected AbstractConvertFunction(Source source, Expression field) { super(source, field); } @@ -56,13 +73,25 @@ protected final TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } - return isType( - field(), - factories()::containsKey, - sourceText(), - null, - 
factories().keySet().stream().map(dt -> dt.name().toLowerCase(Locale.ROOT)).sorted().toArray(String[]::new) - ); + return isType(field(), factories()::containsKey, sourceText(), null, supportedTypesNames(factories().keySet())); + } + + public static String supportedTypesNames(Set types) { + List supportedTypesNames = new ArrayList<>(types.size()); + HashSet supportTypes = new HashSet<>(types); + if (supportTypes.containsAll(NUMERIC_TYPES)) { + supportedTypesNames.add("numeric"); + NUMERIC_TYPES.forEach(supportTypes::remove); + } + + if (types.containsAll(STRING_TYPES)) { + supportedTypesNames.add("string"); + STRING_TYPES.forEach(supportTypes::remove); + } + + supportTypes.forEach(t -> supportedTypesNames.add(t.name().toLowerCase(Locale.ROOT))); + supportedTypesNames.sort(String::compareTo); + return Strings.join(supportedTypesNames, " or "); } @FunctionalInterface diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 442c106042fa0..3a33e086d8fdd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static 
org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -31,13 +34,18 @@ public class ToBoolean extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(BOOLEAN, (field, source) -> field), Map.entry(KEYWORD, ToBooleanFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToBooleanFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToBooleanFromDoubleEvaluator.Factory::new), Map.entry(LONG, ToBooleanFromLongEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToBooleanFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToBooleanFromIntEvaluator.Factory::new) ); - public ToBoolean(Source source, Expression field) { + @FunctionInfo(returnType = "boolean") + public ToBoolean( + Source source, + @Param(name = "v", type = { "boolean", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 9910447708b44..c2f621433ca21 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; public class ToDatetime extends AbstractConvertFunction { @@ -31,12 +34,17 @@ public class ToDatetime extends AbstractConvertFunction { Map.entry(DATETIME, (field, source) -> field), Map.entry(LONG, (field, source) -> field), Map.entry(KEYWORD, ToDatetimeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToDatetimeFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - public ToDatetime(Source source, Expression field) { + @FunctionInfo(returnType = "date") + public ToDatetime( + Source source, + @Param(name = "v", type = { "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index 6b0d638e875a0..44f8507d880d8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import 
org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -37,7 +39,8 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe ) ); - public ToDegrees(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToDegrees(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index e83a0eae8d7a8..7711f55d667ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -23,6 +25,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -33,12 +36,17 @@ public 
class ToDouble extends AbstractConvertFunction { Map.entry(BOOLEAN, ToDoubleFromBooleanEvaluator.Factory::new), Map.entry(DATETIME, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd Map.entry(KEYWORD, ToDoubleFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToDoubleFromStringEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToDoubleFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new) // CastIntToDoubleEvaluator would be a candidate, but not MV'd ); - public ToDouble(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToDouble( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index 4829d39b09d65..97512a03fe2ec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,16 +21,19 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.IP; import static 
org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; public class ToIP extends AbstractConvertFunction { private static final Map EVALUATORS = Map.ofEntries( Map.entry(IP, (field, source) -> field), - Map.entry(KEYWORD, ToIPFromStringEvaluator.Factory::new) + Map.entry(KEYWORD, ToIPFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToIPFromStringEvaluator.Factory::new) ); - public ToIP(Source source, Expression field) { + @FunctionInfo(returnType = "ip") + public ToIP(Source source, @Param(name = "v", type = { "ip", "keyword", "text" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 480962ca27f86..a8e4ef804a2ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -9,8 +9,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; -import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -19,7 +20,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeDoubleToLong; import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; import static 
org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; @@ -27,7 +27,9 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; public class ToInteger extends AbstractConvertFunction { @@ -36,12 +38,17 @@ public class ToInteger extends AbstractConvertFunction { Map.entry(BOOLEAN, ToIntegerFromBooleanEvaluator.Factory::new), Map.entry(DATETIME, ToIntegerFromLongEvaluator.Factory::new), Map.entry(KEYWORD, ToIntegerFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToIntegerFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToIntegerFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToIntegerFromUnsignedLongEvaluator.Factory::new), Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new) ); - public ToInteger(Source source, Expression field) { + @FunctionInfo(returnType = "integer") + public ToInteger( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } @@ -70,7 +77,7 @@ static int fromBoolean(boolean bool) { return bool ? 
1 : 0; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static int fromKeyword(BytesRef in) { String asString = in.utf8ToString(); try { @@ -84,17 +91,22 @@ static int fromKeyword(BytesRef in) { } } - @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromDouble", warnExceptions = { InvalidArgumentException.class }) static int fromDouble(double dbl) { - return fromLong(safeDoubleToLong(dbl)); + return safeToInt(dbl); } - @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) - static int fromUnsignedLong(long lng) { - return fromLong(ToLong.fromUnsignedLong(lng)); + @ConvertEvaluator(extraName = "FromUnsignedLong", warnExceptions = { InvalidArgumentException.class }) + static int fromUnsignedLong(long ul) { + Number n = unsignedLongAsNumber(ul); + int i = n.intValue(); + if (i != n.longValue()) { + throw new InvalidArgumentException("[{}] out of [integer] range", n); + } + return i; } - @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class, QlIllegalArgumentException.class }) + @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static int fromLong(long lng) { return safeToInt(lng); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index b66ad4f359607..0a2546297f038 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -29,6 +31,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; @@ -41,12 +44,20 @@ public class ToLong extends AbstractConvertFunction { Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval), Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToLongFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd ); - public ToLong(Source source, Expression field) { + @FunctionInfo(returnType = "long") + public ToLong( + Source source, + @Param( + name = "v", + type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer", "geo_point", "cartesian_point" } + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 9f39015a8e063..a1d2e1381109d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -37,7 +39,8 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe ) ); - public ToRadians(Source source, Expression field) { + @FunctionInfo(returnType = "double") + public ToRadians(Source source, @Param(name = "v", type = { "double", "long", "unsigned_long", "integer" }) Expression field) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index a37b2becc8595..41d8f87aee436 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import 
org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -55,6 +56,7 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) ); + @FunctionInfo(returnType = "keyword") public ToString( Source source, @Param( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 1b7ee01e50c54..cfa24cd6d8ff8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.InvalidArgumentException; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; import org.elasticsearch.xpack.ql.expression.Expression; @@ -26,6 +28,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.INTEGER; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.type.DataTypes.LONG; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; @@ -38,12 +41,17 @@ public class ToUnsignedLong extends AbstractConvertFunction { Map.entry(DATETIME, 
ToUnsignedLongFromLongEvaluator.Factory::new), Map.entry(BOOLEAN, ToUnsignedLongFromBooleanEvaluator.Factory::new), Map.entry(KEYWORD, ToUnsignedLongFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToUnsignedLongFromStringEvaluator.Factory::new), Map.entry(DOUBLE, ToUnsignedLongFromDoubleEvaluator.Factory::new), Map.entry(LONG, ToUnsignedLongFromLongEvaluator.Factory::new), Map.entry(INTEGER, ToUnsignedLongFromIntEvaluator.Factory::new) ); - public ToUnsignedLong(Source source, Expression field) { + @FunctionInfo(returnType = "unsigned_long") + public ToUnsignedLong( + Source source, + @Param(name = "v", type = { "boolean", "date", "keyword", "text", "double", "long", "unsigned_long", "integer" }) Expression field + ) { super(source, field); } @@ -72,7 +80,7 @@ static long fromBoolean(boolean bool) { return bool ? ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG; } - @ConvertEvaluator(extraName = "FromString", warnExceptions = { NumberFormatException.class }) + @ConvertEvaluator(extraName = "FromString", warnExceptions = { InvalidArgumentException.class, NumberFormatException.class }) static long fromKeyword(BytesRef in) { String asString = in.utf8ToString(); return asLongUnsigned(safeToUnsignedLong(asString)); @@ -83,12 +91,12 @@ static long fromDouble(double dbl) { return asLongUnsigned(safeToUnsignedLong(dbl)); } - @ConvertEvaluator(extraName = "FromLong") + @ConvertEvaluator(extraName = "FromLong", warnExceptions = { InvalidArgumentException.class }) static long fromLong(long lng) { return asLongUnsigned(safeToUnsignedLong(lng)); } - @ConvertEvaluator(extraName = "FromInt") + @ConvertEvaluator(extraName = "FromInt", warnExceptions = { InvalidArgumentException.class }) static long fromInt(int i) { return fromLong(i); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index 
ad7712f33d947..34e8f695b23c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -31,6 +32,7 @@ public class ToVersion extends AbstractConvertFunction { Map.entry(TEXT, ToVersionFromStringEvaluator.Factory::new) ); + @FunctionInfo(returnType = "version") public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index ba63afd8f1e4b..03a385592ac63 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1292,8 +1292,7 @@ public void testRegexOnInt() { public void testUnsupportedTypesWithToString() { // DATE_PERIOD and TIME_DURATION types have been added, but not really patched through the engine; i.e. supported. 
- final String supportedTypes = "boolean, cartesian_point, datetime, double, geo_point, integer, ip, keyword, long, text, " - + "unsigned_long or version"; + final String supportedTypes = "boolean or cartesian_point or datetime or geo_point or ip or numeric or string or version"; verifyUnsupported( "row period = 1 year | eval to_string(period)", "line 1:28: argument of [to_string(period)] must be [" + supportedTypes + "], found value [period] type [date_period]" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 81f2fa98be8cc..f003170a7551d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -798,13 +798,70 @@ private static String typeErrorMessage(boolean includeOrdinal, List validTypes) { String named = NAMED_EXPECTED_TYPES.get(validTypes); if (named == null) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index c1e9494541636..faf10d499127a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -11,6 +11,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Literal; @@ -219,19 +220,30 @@ public static void forUnaryInt( IntFunction expectedValue, int lowerBound, int upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.INTEGER, intCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.intValue()), - warnings + n -> expectedWarnings.apply(n.intValue()) ); } + public static void forUnaryInt( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + IntFunction expectedValue, + int lowerBound, + int upperBound, + List warnings + ) { + forUnaryInt(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#LONG}. */ @@ -242,19 +254,30 @@ public static void forUnaryLong( LongFunction expectedValue, long lowerBound, long upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.LONG, longCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.longValue()), - warnings + expectedWarnings ); } + public static void forUnaryLong( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + LongFunction expectedValue, + long lowerBound, + long upperBound, + List warnings + ) { + forUnaryLong(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#UNSIGNED_LONG}. 
*/ @@ -265,19 +288,30 @@ public static void forUnaryUnsignedLong( Function expectedValue, BigInteger lowerBound, BigInteger upperBound, - List warnings + Function> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.UNSIGNED_LONG, ulongCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply((BigInteger) n), - warnings + n -> expectedWarnings.apply((BigInteger) n) ); } + public static void forUnaryUnsignedLong( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + BigInteger lowerBound, + BigInteger upperBound, + List warnings + ) { + forUnaryUnsignedLong(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#DOUBLE}. */ @@ -289,15 +323,26 @@ public static void forUnaryDouble( double lowerBound, double upperBound, List warnings + ) { + forUnaryDouble(suppliers, expectedEvaluatorToString, expectedType, expectedValue, lowerBound, upperBound, unused -> warnings); + } + + public static void forUnaryDouble( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + DoubleFunction expectedValue, + double lowerBound, + double upperBound, + DoubleFunction> expectedWarnings ) { unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.DOUBLE, doubleCases(lowerBound, upperBound), expectedType, n -> expectedValue.apply(n.doubleValue()), - warnings + n -> expectedWarnings.apply(n.doubleValue()) ); } @@ -311,15 +356,7 @@ public static void forUnaryBoolean( Function expectedValue, List warnings ) { - unary( - suppliers, - expectedEvaluatorToString, - DataTypes.BOOLEAN, - booleanCases(), - expectedType, - v -> expectedValue.apply((Boolean) v), - warnings - ); + unary(suppliers, expectedEvaluatorToString, booleanCases(), expectedType, v -> expectedValue.apply((Boolean) v), warnings); } /** @@ -335,7 +372,6 
@@ public static void forUnaryDatetime( unaryNumeric( suppliers, expectedEvaluatorToString, - DataTypes.DATETIME, dateCases(), expectedType, n -> expectedValue.apply(Instant.ofEpochMilli(n.longValue())), @@ -356,7 +392,6 @@ public static void forUnaryGeoPoint( unaryNumeric( suppliers, expectedEvaluatorToString, - EsqlDataTypes.GEO_POINT, geoPointCases(), expectedType, n -> expectedValue.apply(n.longValue()), @@ -377,7 +412,6 @@ public static void forUnaryCartesianPoint( unaryNumeric( suppliers, expectedEvaluatorToString, - EsqlDataTypes.CARTESIAN_POINT, cartesianPointCases(), expectedType, n -> expectedValue.apply(n.longValue()), @@ -395,15 +429,7 @@ public static void forUnaryIp( Function expectedValue, List warnings ) { - unary( - suppliers, - expectedEvaluatorToString, - DataTypes.IP, - ipCases(), - expectedType, - v -> expectedValue.apply((BytesRef) v), - warnings - ); + unary(suppliers, expectedEvaluatorToString, ipCases(), expectedType, v -> expectedValue.apply((BytesRef) v), warnings); } /** @@ -414,21 +440,30 @@ public static void forUnaryStrings( String expectedEvaluatorToString, DataType expectedType, Function expectedValue, - List warnings + Function> expectedWarnings ) { - for (DataType type : EsqlDataTypes.types().stream().filter(EsqlDataTypes::isString).toList()) { + for (DataType type : AbstractConvertFunction.STRING_TYPES) { unary( suppliers, expectedEvaluatorToString, - type, stringCases(type), expectedType, v -> expectedValue.apply((BytesRef) v), - warnings + v -> expectedWarnings.apply((BytesRef) v) ); } } + public static void forUnaryStrings( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + forUnaryStrings(suppliers, expectedEvaluatorToString, expectedType, expectedValue, unused -> warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#VERSION}. 
*/ @@ -442,7 +477,6 @@ public static void forUnaryVersion( unary( suppliers, expectedEvaluatorToString, - DataTypes.VERSION, versionCases(""), expectedType, v -> expectedValue.apply(new Version((BytesRef) v)), @@ -453,31 +487,39 @@ public static void forUnaryVersion( private static void unaryNumeric( List suppliers, String expectedEvaluatorToString, - DataType inputType, List valueSuppliers, DataType expectedOutputType, - Function expected, - List warnings + Function expectedValue, + Function> expectedWarnings ) { unary( suppliers, expectedEvaluatorToString, - inputType, valueSuppliers, expectedOutputType, - v -> expected.apply((Number) v), - warnings + v -> expectedValue.apply((Number) v), + v -> expectedWarnings.apply((Number) v) ); } - private static void unary( + private static void unaryNumeric( List suppliers, String expectedEvaluatorToString, - DataType inputType, List valueSuppliers, DataType expectedOutputType, - Function expected, + Function expected, List warnings + ) { + unaryNumeric(suppliers, expectedEvaluatorToString, valueSuppliers, expectedOutputType, expected, unused -> warnings); + } + + public static void unary( + List suppliers, + String expectedEvaluatorToString, + List valueSuppliers, + DataType expectedOutputType, + Function expectedValue, + Function> expectedWarnings ) { for (TypedDataSupplier supplier : valueSuppliers) { suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { @@ -492,17 +534,29 @@ private static void unary( List.of(typed), expectedEvaluatorToString, expectedOutputType, - equalTo(expected.apply(value)) + equalTo(expectedValue.apply(value)) ); - for (String warning : warnings) { + for (String warning : expectedWarnings.apply(value)) { testCase = testCase.withWarning(warning); } return testCase; })); } + + } + + public static void unary( + List suppliers, + String expectedEvaluatorToString, + List valueSuppliers, + DataType expectedOutputType, + Function expected, + List warnings + ) { + 
unary(suppliers, expectedEvaluatorToString, valueSuppliers, expectedOutputType, expected, unused -> warnings); } - private static List intCases(int min, int max) { + public static List intCases(int min, int max) { List cases = new ArrayList<>(); if (0 <= max && 0 >= min) { cases.add(new TypedDataSupplier("<0 int>", () -> 0, DataTypes.INTEGER)); @@ -526,7 +580,7 @@ private static List intCases(int min, int max) { return cases; } - private static List longCases(long min, long max) { + public static List longCases(long min, long max) { List cases = new ArrayList<>(); if (0L <= max && 0L >= min) { cases.add(new TypedDataSupplier("<0 long>", () -> 0L, DataTypes.LONG)); @@ -551,7 +605,7 @@ private static List longCases(long min, long max) { return cases; } - private static List ulongCases(BigInteger min, BigInteger max) { + public static List ulongCases(BigInteger min, BigInteger max) { List cases = new ArrayList<>(); // Zero @@ -591,7 +645,7 @@ private static List ulongCases(BigInteger min, BigInteger max return cases; } - private static List doubleCases(double min, double max) { + public static List doubleCases(double min, double max) { List cases = new ArrayList<>(); // Zeros diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java new file mode 100644 index 0000000000000..b00cecd3f4ccc --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; + +public class ToBooleanTests extends AbstractFunctionTestCase { + public ToBooleanTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + final String read = "Attribute[channel=0]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryBoolean(suppliers, read, DataTypes.BOOLEAN, b -> b, emptyList()); + + TestCaseSupplier.forUnaryInt( + suppliers, + "ToBooleanFromIntEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + i -> i != 0, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + "ToBooleanFromLongEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + l -> l != 0, + Long.MIN_VALUE, + Long.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToBooleanFromUnsignedLongEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + ul -> ul.compareTo(BigInteger.ZERO) != 0, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + emptyList() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToBooleanFromDoubleEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + d -> d != 0d, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + emptyList() + ); 
+ TestCaseSupplier.forUnaryStrings( + suppliers, + "ToBooleanFromStringEvaluator[field=" + read + "]", + DataTypes.BOOLEAN, + bytesRef -> String.valueOf(bytesRef).toLowerCase(Locale.ROOT).equals("true"), + emptyList() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToBoolean(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java new file mode 100644 index 0000000000000..c92c8712d1697 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static java.util.Collections.emptyList; + +public class ToDatetimeTests extends AbstractFunctionTestCase { + public ToDatetimeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + final String read = "Attribute[channel=0]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.DATETIME, Instant::toEpochMilli, emptyList()); + + TestCaseSupplier.forUnaryInt( + suppliers, + "ToLongFromIntEvaluator[field=" + read + "]", + DataTypes.DATETIME, + i -> ((Integer) i).longValue(), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + emptyList() + ); + TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.DATETIME, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, emptyList()); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + DataTypes.DATETIME, + BigInteger::longValueExact, + BigInteger.ZERO, + BigInteger.valueOf(Long.MAX_VALUE), + emptyList() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + "ToLongFromUnsignedLongEvaluator[field=" + read + "]", + 
DataTypes.DATETIME, + bi -> null, + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), + UNSIGNED_LONG_MAX, + bi -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + bi + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToLongFromDoubleEvaluator[field=" + read + "]", + DataTypes.DATETIME, + d -> null, + Double.NEGATIVE_INFINITY, + -9.223372036854777E18, // a "convenient" value smaller than `(double) Long.MIN_VALUE` (== ...776E18) + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToLongFromDoubleEvaluator[field=" + read + "]", + DataTypes.DATETIME, + d -> null, + 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + TestCaseSupplier.forUnaryStrings( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: " + + (bytesRef.utf8ToString().isEmpty() + ? 
"cannot parse empty date" + : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']")) + ) + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis past "0001-01-01T00:00:00.000Z" to match the default formatter + () -> new BytesRef(Instant.ofEpochMilli(randomLongBetween(-62135596800000L, Long.MAX_VALUE)).toString()), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> DateParse.DEFAULT_FORMATTER.parseMillis(((BytesRef) bytesRef).utf8ToString()), + emptyList() + ); + TestCaseSupplier.unary( + suppliers, + "ToDatetimeFromStringEvaluator[field=" + read + "]", + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + // millis before "0001-01-01T00:00:00.000Z" + () -> new BytesRef(Instant.ofEpochMilli(randomLongBetween(Long.MIN_VALUE, -62135596800001L)).toString()), + DataTypes.KEYWORD + ) + ), + DataTypes.DATETIME, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + + ((BytesRef) bytesRef).utf8ToString() + + "] with format [yyyy-MM-dd'T'HH:mm:ss.SSS'Z']" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDatetime(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java new file mode 100644 index 0000000000000..a1c3c1f38aac5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToDegreesTests extends AbstractFunctionTestCase { + public ToDegreesTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + Function evaluatorName = eval -> "ToDegreesEvaluator[field=" + eval + "[field=Attribute[channel=0]]]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("ToDoubleFromIntEvaluator"), + DataTypes.DOUBLE, + Math::toDegrees, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("ToDoubleFromLongEvaluator"), + DataTypes.DOUBLE, + Math::toDegrees, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), + DataTypes.DOUBLE, + ul -> Math.toDegrees(ul.doubleValue()), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToDegreesEvaluator[field=Attribute[channel=0]]", + DataTypes.DOUBLE, + Math::toDegrees, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDegrees(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java new file mode 100644 index 0000000000000..ebcaf367b1226 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToDoubleTests extends AbstractFunctionTestCase { + public ToDoubleTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function 
evaluatorName = s -> "ToDoubleFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryDouble( + suppliers, + read, + DataTypes.DOUBLE, + d -> d, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.DOUBLE, b -> b ? 1d : 0d, List.of()); + TestCaseSupplier.forUnaryDatetime( + suppliers, + evaluatorName.apply("Long"), + DataTypes.DOUBLE, + i -> (double) i.toEpochMilli(), + List.of() + ); + // random strings that don't look like a double + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.DOUBLE, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: " + + (bytesRef.utf8ToString().isEmpty() ? "empty String" : ("For input string: \"" + bytesRef.utf8ToString() + "\"")) + ) + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.DOUBLE, + BigInteger::doubleValue, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.DOUBLE, + l -> (double) l, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.DOUBLE, + i -> (double) i, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + // strings of random numbers + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.castToDoubleSuppliersFromRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.DOUBLE, + bytesRef -> 
Double.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToDouble(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java index 33a85f593ee6f..4294144e1cefe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java @@ -17,16 +17,14 @@ import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.stringCases; +import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; -import static org.hamcrest.Matchers.equalTo; public class ToIPTests extends AbstractFunctionTestCase { public ToIPTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -42,33 +40,27 @@ public static Iterable parameters() { // convert from IP to IP TestCaseSupplier.forUnaryIp(suppliers, read, DataTypes.IP, v -> v, List.of()); - // convert any kind of string to IP, with warnings. 
- for (TestCaseSupplier.TypedDataSupplier supplier : stringCases(DataTypes.KEYWORD)) { - suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { - BytesRef value = (BytesRef) supplier.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData(value, supplier.type(), "value"); - TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase( - List.of(typed), - stringEvaluator, - DataTypes.IP, - equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: '" + value.utf8ToString() + "' is not an IP string literal." - ); - return testCase; - })); - } + // convert random string (i.e. not an IP representation) to IP `null`, with warnings. + TestCaseSupplier.forUnaryStrings( + suppliers, + stringEvaluator, + DataTypes.IP, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: '" + bytesRef.utf8ToString() + "' is not an IP string literal." 
+ ) + ); // convert valid IPs shaped as strings - DataType inputType = DataTypes.KEYWORD; - for (TestCaseSupplier.TypedDataSupplier ipGen : validIPsAsStrings()) { - suppliers.add(new TestCaseSupplier(ipGen.name(), List.of(inputType), () -> { - BytesRef ip = (BytesRef) ipGen.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData(ip, inputType, "value"); - return new TestCaseSupplier.TestCase(List.of(typed), stringEvaluator, DataTypes.IP, equalTo(parseIP(ip.utf8ToString()))); - })); - } + TestCaseSupplier.unary( + suppliers, + stringEvaluator, + validIPsAsStrings(), + DataTypes.IP, + bytesRef -> parseIP(((BytesRef) bytesRef).utf8ToString()), + emptyList() + ); // add null as parameter return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java new file mode 100644 index 0000000000000..4402c6d8529b4 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToInt; + +public class ToIntegerTests extends AbstractFunctionTestCase { + public ToIntegerTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToIntegerFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt(suppliers, read, DataTypes.INTEGER, i -> i, Integer.MIN_VALUE, Integer.MAX_VALUE, List.of()); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.INTEGER, b -> b ? 
1 : 0, List.of()); + + // datetimes that fall within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("Long"), + dateCases(0, Integer.MAX_VALUE), + DataTypes.INTEGER, + l -> ((Long) l).intValue(), + List.of() + ); + // datetimes that fall outside Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("Long"), + dateCases(Integer.MAX_VALUE + 1L, Long.MAX_VALUE), + DataTypes.INTEGER, + l -> null, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + ) + ); + // random strings that don't look like an Integer + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + ) + ); + // from doubles within Integer's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> safeToInt(Math.round(d)), + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + // from doubles outside Integer's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> null, + Double.NEGATIVE_INFINITY, + Integer.MIN_VALUE - 1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [integer] range" + ) + ); + // from doubles outside Integer's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.INTEGER, + d -> null, + Integer.MAX_VALUE + 1d, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [integer] range" + ) + ); + + // from unsigned_long within Integer's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.INTEGER, + BigInteger::intValue, + BigInteger.ZERO, + BigInteger.valueOf(Integer.MAX_VALUE), + List.of() + ); + // from unsigned_long outside Integer's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.INTEGER, + ul -> null, + BigInteger.valueOf(Integer.MAX_VALUE).add(BigInteger.ONE), + UNSIGNED_LONG_MAX, + ul -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + ul + "] out of [integer] range" + + ) + ); + + // from long, within Integer's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> (int) l, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + // from long, outside Integer's range, negative + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> null, + Long.MIN_VALUE, + Integer.MIN_VALUE - 1L, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + + ) + ); + // from long, outside Integer's range, positive + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.INTEGER, + l -> null, + Integer.MAX_VALUE + 1L, + Long.MAX_VALUE, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [integer] range" + ) + ); + + // strings of random ints within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.intCases(Integer.MIN_VALUE, Integer.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> Integer.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + // strings of random doubles within Integer's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Integer.MIN_VALUE, Integer.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> safeToInt(Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString()))), + List.of() + ); + // strings of random doubles outside Integer's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, Integer.MIN_VALUE - 1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + 
.toList(), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + // strings of random doubles outside Integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Integer.MAX_VALUE + 1d, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.INTEGER, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToInteger(source, args.get(0)); + } + + private static List dateCases(long min, long max) { + List dataSuppliers = new ArrayList<>(2); + if (min == 0L) { + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME)); + } + if (max <= Integer.MAX_VALUE) { + dataSuppliers.add(new TestCaseSupplier.TypedDataSupplier("<1970-01-25T20:31:23.647Z>", () -> 2147483647L, DataTypes.DATETIME)); + } + dataSuppliers.add( + new TestCaseSupplier.TypedDataSupplier("", () -> ESTestCase.randomLongBetween(min, max), DataTypes.DATETIME) + ); + return dataSuppliers; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java new file mode 100644 index 0000000000000..b153fa8489dee --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -0,0 +1,217 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToLongTests extends AbstractFunctionTestCase { + public ToLongTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToLongFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryLong(suppliers, read, DataTypes.LONG, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); + + TestCaseSupplier.forUnaryBoolean(suppliers, evaluatorName.apply("Boolean"), DataTypes.LONG, b -> b ? 
1L : 0L, List.of()); + + // geo types + TestCaseSupplier.forUnaryGeoPoint(suppliers, read, DataTypes.LONG, i -> i, List.of()); + TestCaseSupplier.forUnaryCartesianPoint(suppliers, read, DataTypes.LONG, i -> i, List.of()); + // datetimes + TestCaseSupplier.forUnaryDatetime(suppliers, read, DataTypes.LONG, Instant::toEpochMilli, List.of()); + // random strings that don't look like a long + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("String"), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + bytesRef.utf8ToString() + "\"" + ) + ); + // from doubles within long's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + Math::round, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + // from doubles outside long's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + d -> null, + Double.NEGATIVE_INFINITY, + Long.MIN_VALUE - 1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + // from doubles outside long's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.LONG, + d -> null, + Long.MAX_VALUE + 1d, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [long] range" + ) + ); + + // from unsigned_long within long's range + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.LONG, + BigInteger::longValue, + BigInteger.ZERO, + BigInteger.valueOf(Long.MAX_VALUE), + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("UnsignedLong"), + DataTypes.LONG, + ul -> null, + BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), + UNSIGNED_LONG_MAX, + ul -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + ul + "] out of [long] range" + + ) + ); + + // from integer + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.LONG, + l -> (long) l, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + + // strings of random longs + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.longCases(Long.MIN_VALUE, Long.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> Long.valueOf(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + // strings of random doubles within long's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Long.MIN_VALUE, Long.MAX_VALUE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> Math.round(Double.parseDouble(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of 
random doubles outside integer's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, Long.MIN_VALUE - 1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + // strings of random doubles outside integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(Long.MAX_VALUE + 1d, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: For input string: \"" + ((BytesRef) bytesRef).utf8ToString() + "\"" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToLong(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java new file mode 100644 index 0000000000000..ffd1a2734d75f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +public class ToRadiansTests extends AbstractFunctionTestCase { + public ToRadiansTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + Function evaluatorName = eval -> "ToRadiansEvaluator[field=" + eval + "[field=Attribute[channel=0]]]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("ToDoubleFromIntEvaluator"), + DataTypes.DOUBLE, + Math::toRadians, + Integer.MIN_VALUE, + Integer.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("ToDoubleFromLongEvaluator"), + DataTypes.DOUBLE, + Math::toRadians, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + evaluatorName.apply("ToDoubleFromUnsignedLongEvaluator"), + DataTypes.DOUBLE, + ul -> Math.toRadians(ul.doubleValue()), + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + TestCaseSupplier.forUnaryDouble( + suppliers, + "ToRadiansEvaluator[field=Attribute[channel=0]]", + DataTypes.DOUBLE, + Math::toRadians, + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + List.of() + ); + + return 
parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToRadians(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java new file mode 100644 index 0000000000000..080424602703d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -0,0 +1,258 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.type.DataTypeConverter.safeToUnsignedLong; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ONE_AS_UNSIGNED_LONG; +import static 
org.elasticsearch.xpack.ql.util.NumericUtils.UNSIGNED_LONG_MAX_AS_DOUBLE; +import static org.elasticsearch.xpack.ql.util.NumericUtils.ZERO_AS_UNSIGNED_LONG; +import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; + +public class ToUnsignedLongTests extends AbstractFunctionTestCase { + public ToUnsignedLongTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + String read = "Attribute[channel=0]"; + Function evaluatorName = s -> "ToUnsignedLongFrom" + s + "Evaluator[field=" + read + "]"; + List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + read, + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + + TestCaseSupplier.forUnaryBoolean( + suppliers, + evaluatorName.apply("Boolean"), + DataTypes.UNSIGNED_LONG, + b -> b ? ONE_AS_UNSIGNED_LONG : ZERO_AS_UNSIGNED_LONG, + List.of() + ); + + // datetimes + TestCaseSupplier.forUnaryDatetime( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + instant -> asLongUnsigned(instant.toEpochMilli()), + List.of() + ); + // random strings that don't look like an unsigned_long + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.UNSIGNED_LONG, bytesRef -> null, bytesRef -> { + // BigDecimal, used to parse unsigned_longs will throw NFEs with different messages depending on empty string, first + // non-number character after a number-looking like prefix, or string starting with "e", maybe others -- safer to take + // this shortcut here. + Exception e = expectThrows(NumberFormatException.class, () -> new BigDecimal(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.NumberFormatException: " + e.getMessage() + ); + }); + // from doubles within unsigned_long's range + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> asLongUnsigned(BigDecimal.valueOf(d).toBigInteger()), // note: not: new BigDecimal(d).toBigInteger + 0d, + UNSIGNED_LONG_MAX_AS_DOUBLE, + List.of() + ); + // from doubles outside unsigned_long's range, negative + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> null, + Double.NEGATIVE_INFINITY, + -1d, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + ) + ); + // from doubles outside Long's range, positive + TestCaseSupplier.forUnaryDouble( + suppliers, + evaluatorName.apply("Double"), + DataTypes.UNSIGNED_LONG, + d -> null, + UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, + Double.POSITIVE_INFINITY, + d -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + ) + ); + + // from long within unsigned_long's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + 0L, + Long.MAX_VALUE, + List.of() + ); + // from long outside unsigned_long's range + TestCaseSupplier.forUnaryLong( + suppliers, + evaluatorName.apply("Long"), + DataTypes.UNSIGNED_LONG, + unused -> null, + Long.MIN_VALUE, + -1L, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + ) + ); + + // from int within unsigned_long's range + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.UNSIGNED_LONG, + NumericUtils::asLongUnsigned, + 0, + Integer.MAX_VALUE, + List.of() + ); + // from int outside unsigned_long's range + TestCaseSupplier.forUnaryInt( + suppliers, + evaluatorName.apply("Int"), + DataTypes.UNSIGNED_LONG, + unused -> null, + Integer.MIN_VALUE, + -1, + l -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + ) + ); + + // strings of random unsigned_longs + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.ulongCases(BigInteger.ZERO, UNSIGNED_LONG_MAX) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> asLongUnsigned(safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of random doubles within unsigned_long's range + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(0, UNSIGNED_LONG_MAX_AS_DOUBLE) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> asLongUnsigned(safeToUnsignedLong(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + // strings of random doubles outside unsigned_long's range, negative + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + 
TestCaseSupplier.doubleCases(Double.NEGATIVE_INFINITY, -1d) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + + ((BytesRef) bytesRef).utf8ToString() + + "] out of [unsigned_long] range" + ) + ); + // strings of random doubles outside Integer's range, positive + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("String"), + TestCaseSupplier.doubleCases(UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, Double.POSITIVE_INFINITY) + .stream() + .map( + tds -> new TestCaseSupplier.TypedDataSupplier( + tds.name() + "as string", + () -> new BytesRef(tds.supplier().get().toString()), + DataTypes.KEYWORD + ) + ) + .toList(), + DataTypes.UNSIGNED_LONG, + bytesRef -> null, + bytesRef -> List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: org.elasticsearch.xpack.ql.InvalidArgumentException: [" + + ((BytesRef) bytesRef).utf8ToString() + + "] out of [unsigned_long] range" + ) + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToUnsignedLong(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java index fefa397f7c77f..c6e2abae14443 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; @@ -24,8 +23,6 @@ import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; - public class ToVersionTests extends AbstractFunctionTestCase { public ToVersionTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); @@ -37,9 +34,12 @@ public static Iterable parameters() { String read = "Attribute[channel=0]"; String stringEvaluator = "ToVersionFromStringEvaluator[field=" + read + "]"; List suppliers = new ArrayList<>(); + // Converting and IP to an IP doesn't change anything. Everything should succeed. 
- TestCaseSupplier.forUnaryVersion(suppliers, read, DataTypes.VERSION, v -> v.toBytesRef(), List.of()); - // None of the random strings ever look like versions so they should all become "invalid" versions + TestCaseSupplier.forUnaryVersion(suppliers, read, DataTypes.VERSION, Version::toBytesRef, List.of()); + + // None of the random strings ever look like versions so they should all become "invalid" versions: + // https://github.com/elastic/elasticsearch/issues/98989 // TODO should this return null with warnings? they aren't version shaped at all. TestCaseSupplier.forUnaryStrings( suppliers, @@ -48,20 +48,19 @@ public static Iterable parameters() { bytesRef -> new Version(bytesRef.utf8ToString()).toBytesRef(), List.of() ); + // But strings that are shaped like versions do parse to valid versions - for (DataType inputType : EsqlDataTypes.types().stream().filter(EsqlDataTypes::isString).toList()) { - for (TestCaseSupplier.TypedDataSupplier versionGen : TestCaseSupplier.versionCases(inputType.typeName() + " ")) { - suppliers.add(new TestCaseSupplier(versionGen.name(), List.of(inputType), () -> { - BytesRef encodedVersion = (BytesRef) versionGen.supplier().get(); - TestCaseSupplier.TypedData typed = new TestCaseSupplier.TypedData( - new BytesRef(new Version(encodedVersion).toString()), - inputType, - "value" - ); - return new TestCaseSupplier.TestCase(List.of(typed), stringEvaluator, DataTypes.VERSION, equalTo(encodedVersion)); - })); - } + for (DataType inputType : AbstractConvertFunction.STRING_TYPES) { + TestCaseSupplier.unary( + suppliers, + read, + TestCaseSupplier.versionCases(inputType.typeName() + " "), + DataTypes.VERSION, + bytesRef -> new Version((BytesRef) bytesRef).toBytesRef(), + List.of() + ); } + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java 
b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java index bb7fa9cf8c03a..87f30a89577c2 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/DataTypeConverter.java @@ -382,6 +382,14 @@ public static int safeToInt(long x) { return (int) x; } + public static int safeToInt(double x) { + if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) { + throw new InvalidArgumentException("[{}] out of [integer] range", x); + } + // cast is safe, double can represent all of int's range + return (int) Math.round(x); + } + public static long safeDoubleToLong(double x) { if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) { throw new InvalidArgumentException("[{}] out of [long] range", x); From 237db902d20dd017d89bd2f9c9299b6190a12d12 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Mon, 4 Dec 2023 13:03:55 +0000 Subject: [PATCH 173/263] Update to 9.9.0 RC --- build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/build.gradle b/build.gradle index d10f836db4024..c0b613beefea4 100644 --- a/build.gradle +++ b/build.gradle @@ -195,11 +195,6 @@ if (project.gradle.startParameter.taskNames.any { it.startsWith("checkPart") || subprojects { proj -> apply plugin: 'elasticsearch.base' - - repositories { - // TODO: Temporary for Lucene RC builds. 
REMOVE - maven { url "https://dist.apache.org/repos/dist/dev/lucene/lucene-9.9.0-RC2-rev-06070c0dceba07f0d33104192d9ac98ca16fc500/lucene/maven" } - } } allprojects { From 84dad0279c728e2b0567e29cf84bb526cccc82bc Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Mon, 4 Dec 2023 08:09:59 -0500 Subject: [PATCH 174/263] [Query Rules] Fix bug where combining the same metadata with text/numeric values leads to error (#102891) * Fix issue where query rule criteria with matching metadata but different types returns error * Update docs/changelog/102891.yaml --- docs/changelog/102891.yaml | 7 ++++ .../test/entsearch/260_rule_query_search.yml | 42 +++++++++++++++++++ .../xpack/application/rules/QueryRule.java | 2 +- .../application/rules/QueryRuleCriteria.java | 9 +++- .../rules/QueryRuleCriteriaType.java | 9 +++- 5 files changed, 65 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/102891.yaml diff --git a/docs/changelog/102891.yaml b/docs/changelog/102891.yaml new file mode 100644 index 0000000000000..c5d5ed8c6758e --- /dev/null +++ b/docs/changelog/102891.yaml @@ -0,0 +1,7 @@ +pr: 102891 +summary: "[Query Rules] Fix bug where combining the same metadata with text/numeric\ + \ values leads to error" +area: Application +type: bug +issues: + - 102827 diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml index b41636e624674..c287209da5bed 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/260_rule_query_search.yml @@ -194,4 +194,46 @@ setup: - match: { hits.hits.0._id: 'doc2' } - match: { hits.hits.1._id: 'doc3' } +--- +"Perform a rule query over a ruleset with combined numeric and text rule matching": 
+ + - do: + query_ruleset.put: + ruleset_id: combined-ruleset + body: + rules: + - rule_id: rule1 + type: pinned + criteria: + - type: fuzzy + metadata: foo + values: [ bar ] + actions: + ids: + - 'doc1' + - rule_id: rule2 + type: pinned + criteria: + - type: lte + metadata: foo + values: [ 100 ] + actions: + ids: + - 'doc2' + - do: + search: + body: + query: + rule_query: + organic: + query_string: + default_field: text + query: blah blah blah + match_criteria: + foo: baz + ruleset_id: combined-ruleset + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: 'doc1' } + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java index 9b2ce393e5b04..9cca42b0402bf 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRule.java @@ -294,7 +294,7 @@ public AppliedQueryRules applyRule(AppliedQueryRules appliedRules, Map Date: Mon, 4 Dec 2023 14:27:54 +0100 Subject: [PATCH 175/263] Inference telemetry (#102877) * Empty infenrece usage wiring. 
* Add fake data * Fix NPE for secretSettings == null * Real inference model stats * New transport version * Code polish * Lint fixes * Update docs/changelog/102877.yaml * Update 102877.yaml * Add inference to yamlRestTest * Declare inference usage action as non-operator * TransportInferenceUsageActionTests * Lint fixes * Replace map by ToXContentObject/Writeable * Polish code * AbstractWireSerializingTestCase --------- Co-authored-by: Elastic Machine --- docs/changelog/102877.yaml | 5 + docs/reference/rest-api/usage.asciidoc | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../core/src/main/java/module-info.java | 1 + .../xpack/core/XPackClientPlugin.java | 3 + .../elasticsearch/xpack/core/XPackField.java | 2 + .../core/action/XPackUsageFeatureAction.java | 2 + .../inference/InferenceFeatureSetUsage.java | 116 +++++++++++++++++ .../InferenceFeatureSetUsageTests.java | 41 ++++++ .../xpack/inference/InferencePlugin.java | 5 +- .../action/TransportInferenceUsageAction.java | 81 ++++++++++++ .../TransportInferenceUsageActionTests.java | 121 ++++++++++++++++++ .../xpack/security/operator/Constants.java | 1 + 13 files changed, 383 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/102877.yaml create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsage.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java diff --git a/docs/changelog/102877.yaml b/docs/changelog/102877.yaml new file mode 100644 index 0000000000000..da2de19b19a90 --- /dev/null +++ b/docs/changelog/102877.yaml @@ -0,0 +1,5 @@ +pr: 102877 +summary: Add basic telelemetry for the inference 
feature +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 959a798378fc6..e2529de75f0e7 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -197,6 +197,11 @@ GET /_xpack/usage }, "node_count" : 1 }, + "inference": { + "available" : true, + "enabled" : true, + "models" : [] + }, "logstash" : { "available" : true, "enabled" : true diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c392d3b6b4e29..57dc307a75841 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -184,6 +184,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_PROFILE = def(8_551_00_0); public static final TransportVersion CLUSTER_STATS_RESCORER_USAGE_ADDED = def(8_552_00_0); public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); + public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/core/src/main/java/module-info.java b/x-pack/plugin/core/src/main/java/module-info.java index 4aa2e145228b8..f747d07224454 100644 --- a/x-pack/plugin/core/src/main/java/module-info.java +++ b/x-pack/plugin/core/src/main/java/module-info.java @@ -75,6 +75,7 @@ exports org.elasticsearch.xpack.core.indexing; exports org.elasticsearch.xpack.core.inference.action; exports org.elasticsearch.xpack.core.inference.results; + exports org.elasticsearch.xpack.core.inference; exports org.elasticsearch.xpack.core.logstash; exports org.elasticsearch.xpack.core.ml.action; exports org.elasticsearch.xpack.core.ml.annotations; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index ac16631bacb73..df19648307a0b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.ilm.UnfollowAction; import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; import org.elasticsearch.xpack.core.logstash.LogstashFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -133,6 +134,8 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.LOGSTASH, LogstashFeatureSetUsage::new), // ML new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MACHINE_LEARNING, MachineLearningFeatureSetUsage::new), + // inference + new 
NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.INFERENCE, InferenceFeatureSetUsage::new), // monitoring new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.MONITORING, MonitoringFeatureSetUsage::new), // security diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index c8a78af429592..801ef2c463e95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -18,6 +18,8 @@ public final class XPackField { public static final String GRAPH = "graph"; /** Name constant for the machine learning feature. */ public static final String MACHINE_LEARNING = "ml"; + /** Name constant for the inference feature. */ + public static final String INFERENCE = "inference"; /** Name constant for the Logstash feature. */ public static final String LOGSTASH = "logstash"; /** Name constant for the Beats feature. 
*/ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index d96fd91ed3f22..c0e6d96c1569a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -27,6 +27,7 @@ public class XPackUsageFeatureAction extends ActionType modelStats; + + public InferenceFeatureSetUsage(Collection modelStats) { + super(XPackField.INFERENCE, true, true); + this.modelStats = modelStats; + } + + public InferenceFeatureSetUsage(StreamInput in) throws IOException { + super(in); + this.modelStats = in.readCollectionAsList(ModelStats::new); + } + + @Override + protected void innerXContent(XContentBuilder builder, Params params) throws IOException { + super.innerXContent(builder, params); + builder.xContentList("models", modelStats); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeCollection(modelStats); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.INFERENCE_USAGE_ADDED; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java new file mode 100644 index 0000000000000..8f64b521c64c9 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/InferenceFeatureSetUsageTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference; + +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; + +public class InferenceFeatureSetUsageTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return InferenceFeatureSetUsage.ModelStats::new; + } + + @Override + protected InferenceFeatureSetUsage.ModelStats createTestInstance() { + RandomStrings.randomAsciiLettersOfLength(random(), 10); + return new InferenceFeatureSetUsage.ModelStats( + randomIdentifier(), + TaskType.values()[randomInt(TaskType.values().length - 1)], + randomInt(10) + ); + } + + @Override + protected InferenceFeatureSetUsage.ModelStats mutateInstance(InferenceFeatureSetUsage.ModelStats modelStats) throws IOException { + InferenceFeatureSetUsage.ModelStats newModelStats = new InferenceFeatureSetUsage.ModelStats(modelStats); + newModelStats.add(); + return newModelStats; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 3adc63c9863cb..e08224aaffdd5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -32,6 +32,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; import 
org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -39,6 +40,7 @@ import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; import org.elasticsearch.xpack.inference.action.TransportInferenceAction; +import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; @@ -86,7 +88,8 @@ public InferencePlugin(Settings settings) { new ActionHandler<>(InferenceAction.INSTANCE, TransportInferenceAction.class), new ActionHandler<>(GetInferenceModelAction.INSTANCE, TransportGetInferenceModelAction.class), new ActionHandler<>(PutInferenceModelAction.INSTANCE, TransportPutInferenceModelAction.class), - new ActionHandler<>(DeleteInferenceModelAction.INSTANCE, TransportDeleteInferenceModelAction.class) + new ActionHandler<>(DeleteInferenceModelAction.INSTANCE, TransportDeleteInferenceModelAction.class), + new ActionHandler<>(XPackUsageFeatureAction.INFERENCE, TransportInferenceUsageAction.class) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java new file mode 100644 index 0000000000000..54452d8a7ed68 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageAction.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; + +import java.util.Map; +import java.util.TreeMap; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; + +public class TransportInferenceUsageAction extends XPackUsageFeatureTransportAction { + + private final Client client; + + @Inject + public TransportInferenceUsageAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client + ) { + super( + XPackUsageFeatureAction.INFERENCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.client = new OriginSettingClient(client, ML_ORIGIN); + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + 
ClusterState state, + ActionListener listener + ) throws Exception { + GetInferenceModelAction.Request getInferenceModelAction = new GetInferenceModelAction.Request("_all", TaskType.ANY); + client.execute(GetInferenceModelAction.INSTANCE, getInferenceModelAction, ActionListener.wrap(response -> { + Map stats = new TreeMap<>(); + for (ModelConfigurations model : response.getModels()) { + String statKey = model.getService() + ":" + model.getTaskType().name(); + InferenceFeatureSetUsage.ModelStats stat = stats.computeIfAbsent( + statKey, + key -> new InferenceFeatureSetUsage.ModelStats(model.getService(), model.getTaskType()) + ); + stat.add(); + } + InferenceFeatureSetUsage usage = new InferenceFeatureSetUsage(stats.values()); + listener.onResponse(new XPackUsageFeatureResponse(usage)); + }, listener::onFailure)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java new file mode 100644 index 0000000000000..b0c59fe160be3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceUsageActionTests.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockUtils; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.inference.InferenceFeatureSetUsage; +import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; +import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; +import org.junit.After; +import org.junit.Before; + +import java.util.List; + +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportInferenceUsageActionTests extends ESTestCase { + + private Client 
client; + private TransportInferenceUsageAction action; + + @Before + public void init() { + client = mock(Client.class); + ThreadPool threadPool = new TestThreadPool("test"); + when(client.threadPool()).thenReturn(threadPool); + + TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(mock(ThreadPool.class)); + + action = new TransportInferenceUsageAction( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + client + ); + } + + @After + public void close() { + client.threadPool().shutdown(); + } + + public void test() throws Exception { + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetInferenceModelAction.Response( + List.of( + new ModelConfigurations("model-001", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-002", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-003", TaskType.SPARSE_EMBEDDING, "hugging_face_elser", mock(ServiceSettings.class)), + new ModelConfigurations("model-004", TaskType.TEXT_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-005", TaskType.SPARSE_EMBEDDING, "openai", mock(ServiceSettings.class)), + new ModelConfigurations("model-006", TaskType.SPARSE_EMBEDDING, "hugging_face_elser", mock(ServiceSettings.class)) + ) + ) + ); + return Void.TYPE; + }).when(client).execute(any(GetInferenceModelAction.class), any(), any()); + + PlainActionFuture future = new PlainActionFuture<>(); + action.masterOperation(mock(Task.class), mock(XPackUsageRequest.class), mock(ClusterState.class), future); + + BytesStreamOutput out = new BytesStreamOutput(); + future.get().getUsage().writeTo(out); + XPackFeatureSet.Usage usage = new InferenceFeatureSetUsage(out.bytes().streamInput()); + + 
assertThat(usage.name(), is(XPackField.INFERENCE)); + assertTrue(usage.enabled()); + assertTrue(usage.available()); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + XContentSource source = new XContentSource(builder); + assertThat(source.getValue("models"), hasSize(3)); + assertThat(source.getValue("models.0.service"), is("hugging_face_elser")); + assertThat(source.getValue("models.0.task_type"), is("SPARSE_EMBEDDING")); + assertThat(source.getValue("models.0.count"), is(2)); + assertThat(source.getValue("models.1.service"), is("openai")); + assertThat(source.getValue("models.1.task_type"), is("SPARSE_EMBEDDING")); + assertThat(source.getValue("models.1.count"), is(1)); + assertThat(source.getValue("models.2.service"), is("openai")); + assertThat(source.getValue("models.2.task_type"), is("TEXT_EMBEDDING")); + assertThat(source.getValue("models.2.count"), is(3)); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5412e7d05f27f..86640e2e1a784 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -416,6 +416,7 @@ public class Constants { "cluster:monitor/xpack/usage/graph", "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", + "cluster:monitor/xpack/usage/inference", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", From 8be04463e4ae5795fc3fad45f2d01314eaf81035 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner 
<56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 4 Dec 2023 08:32:54 -0500 Subject: [PATCH 176/263] [ML] Fix text embedding response format for TransportCoordinatedInferenceAction (#102890) * Fix for response format * Adding tests --- .../inference/InferenceServiceResults.java | 11 +++++++++ .../results/SparseEmbeddingResults.java | 5 ++++ .../results/TextEmbeddingResults.java | 8 +++++++ .../results/SparseEmbeddingResultsTests.java | 21 ++++++++++++++++ .../results/TextEmbeddingResultsTests.java | 24 +++++++++++++++++++ .../TransportCoordinatedInferenceAction.java | 2 +- 6 files changed, 70 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java index 37990caeec097..ab5b74faa6530 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceResults.java @@ -16,6 +16,17 @@ public interface InferenceServiceResults extends NamedWriteable, ToXContentFragment { + /** + * Transform the result to match the format required for the TransportCoordinatedInferenceAction. + * For the inference plugin TextEmbeddingResults, the {@link #transformToLegacyFormat()} transforms the + * results into an intermediate format only used by the plugin's return value. It doesn't align with what the + * TransportCoordinatedInferenceAction expects. TransportCoordinatedInferenceAction expects an ml plugin + * TextEmbeddingResults. + * + * For other results like SparseEmbeddingResults, this method can be a pass through to the transformToLegacyFormat. 
+ */ + List transformToCoordinationFormat(); + /** * Transform the result to match the format required for versions prior to * {@link org.elasticsearch.TransportVersions#INFERENCE_SERVICE_RESULTS_ADDED} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index 20279e82d6c09..910ea5cab214d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -81,6 +81,11 @@ public Map asMap() { return map; } + @Override + public List transformToCoordinationFormat() { + return transformToLegacyFormat(); + } + @Override public List transformToLegacyFormat() { return embeddings.stream() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java index 7a7ccab2b4daa..ace5974866038 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java @@ -78,6 +78,14 @@ public String getWriteableName() { return NAME; } + @Override + public List transformToCoordinationFormat() { + return embeddings.stream() + .map(embedding -> embedding.values.stream().mapToDouble(value -> value).toArray()) + .map(values -> new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults(TEXT_EMBEDDING, values, false)) + .toList(); + } + @Override @SuppressWarnings("deprecation") public List transformToLegacyFormat() { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java index 0a8bfd20caaf1..6f8fa0c453d09 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/SparseEmbeddingResultsTests.java @@ -11,12 +11,14 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; import static org.hamcrest.Matchers.is; public class SparseEmbeddingResultsTests extends AbstractWireSerializingTestCase { @@ -151,6 +153,25 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I }""")); } + public void testTransformToCoordinationFormat() { + var results = createSparseResult( + List.of( + createEmbedding(List.of(new SparseEmbeddingResults.WeightedToken("token", 0.1F)), false), + createEmbedding(List.of(new SparseEmbeddingResults.WeightedToken("token2", 0.2F)), true) + ) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new TextExpansionResults(DEFAULT_RESULTS_FIELD, List.of(new TextExpansionResults.WeightedToken("token", 0.1F)), false), + new TextExpansionResults(DEFAULT_RESULTS_FIELD, List.of(new TextExpansionResults.WeightedToken("token2", 0.2F)), true) + ) + ) + ); + } + public record EmbeddingExpectation(Map tokens, boolean isTruncated) {} public static Map buildExpectation(List 
embeddings) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 71d14e09872fd..09d9894d98853 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -100,6 +100,30 @@ public void testToXContent_CreatesTheRightFormatForMultipleEmbeddings() throws I }""")); } + public void testTransformToCoordinationFormat() { + var results = new TextEmbeddingResults( + List.of(new TextEmbeddingResults.Embedding(List.of(0.1F, 0.2F)), new TextEmbeddingResults.Embedding(List.of(0.3F, 0.4F))) + ).transformToCoordinationFormat(); + + assertThat( + results, + is( + List.of( + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingResults.TEXT_EMBEDDING, + new double[] { 0.1F, 0.2F }, + false + ), + new org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults( + TextEmbeddingResults.TEXT_EMBEDDING, + new double[] { 0.3F, 0.4F }, + false + ) + ) + ) + ); + } + @Override protected Writeable.Reader instanceReader() { return TextEmbeddingResults::new; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index d90c9ec807495..13e04772683eb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -182,7 +182,7 @@ private void replaceErrorOnMissing( } static InferModelAction.Response 
translateInferenceServiceResponse(InferenceServiceResults inferenceResults) { - var legacyResults = new ArrayList(inferenceResults.transformToLegacyFormat()); + var legacyResults = new ArrayList(inferenceResults.transformToCoordinationFormat()); return new InferModelAction.Response(legacyResults, null, false); } } From 5b7325b44393246e9852b49f41e08485ae4678cd Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Dec 2023 09:07:42 -0500 Subject: [PATCH 177/263] Fix test failure #102868 (#102889) closes https://github.com/elastic/elasticsearch/issues/102868 --- .../ExceptionSerializationTests.java | 34 ++++++------------- 1 file changed, 10 insertions(+), 24 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 2263bfe78f218..f7362c7001c36 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CancellableThreadsTests; @@ -129,9 +128,8 @@ public class ExceptionSerializationTests extends ESTestCase { - public void testExceptionRegistration() throws ClassNotFoundException, IOException, URISyntaxException { + public void testExceptionRegistration() throws IOException, URISyntaxException { final Set> notRegistered = new HashSet<>(); - final Set> hasDedicatedWrite = new HashSet<>(); final Set> registered = new HashSet<>(); final String path = "/org/elasticsearch"; final Path startPath = 
PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI()) @@ -146,13 +144,13 @@ public void testExceptionRegistration() throws ClassNotFoundException, IOExcepti private Path pkgPrefix = PathUtils.get(path).getParent(); @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { pkgPrefix = pkgPrefix.resolve(dir.getFileName()); return FileVisitResult.CONTINUE; } @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { checkFile(file.getFileName().toString()); return FileVisitResult.CONTINUE; } @@ -180,13 +178,6 @@ private void checkClass(Class clazz) { notRegistered.add(clazz); } else if (ElasticsearchException.isRegistered(clazz.asSubclass(Throwable.class), TransportVersion.current())) { registered.add(clazz); - try { - if (clazz.getMethod("writeTo", StreamOutput.class) != null) { - hasDedicatedWrite.add(clazz); - } - } catch (Exception e) { - // fair enough - } } } @@ -199,7 +190,7 @@ private Class loadClass(String filename) throws ClassNotFoundException { for (Path p : pkgPrefix) { pkg.append(p.getFileName().toString()).append("."); } - pkg.append(filename.substring(0, filename.length() - 6)); + pkg.append(filename, 0, filename.length() - 6); return getClass().getClassLoader().loadClass(pkg.toString()); } @@ -209,7 +200,7 @@ public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOExce } @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + public FileVisitResult postVisitDirectory(Path dir, IOException exc) { pkgPrefix = pkgPrefix.getParent(); return FileVisitResult.CONTINUE; } @@ -220,7 +211,7 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOEx 
Files.walkFileTree(testStartPath, visitor); assertTrue(notRegistered.remove(TestException.class)); assertTrue(notRegistered.remove(UnknownHeaderException.class)); - assertTrue("Classes subclassing ElasticsearchException must be registered \n" + notRegistered.toString(), notRegistered.isEmpty()); + assertTrue("Classes subclassing ElasticsearchException must be registered \n" + notRegistered, notRegistered.isEmpty()); assertTrue(registered.removeAll(ElasticsearchException.getRegisteredKeys())); // check assertEquals(registered.toString(), 0, registered.size()); } @@ -344,7 +335,7 @@ public void testInvalidIndexTemplateException() throws IOException { assertEquals(ex.name(), "foo"); ex = serialize(new InvalidIndexTemplateException(null, "bar")); assertEquals(ex.getMessage(), "index_template [null] invalid, cause [bar]"); - assertEquals(ex.name(), null); + assertNull(ex.name()); } public void testActionTransportException() throws IOException { @@ -353,17 +344,12 @@ public void testActionTransportException() throws IOException { assertEquals("[name?][" + transportAddress + "][ACTION BABY!] 
message?", ex.getMessage()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102868") public void testSearchContextMissingException() throws IOException { ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); - TransportVersion version = TransportVersionUtils.randomVersion(random()); + TransportVersion version = TransportVersionUtils.randomCompatibleVersion(random()); SearchContextMissingException ex = serialize(new SearchContextMissingException(contextId), version); assertThat(ex.contextId().getId(), equalTo(contextId.getId())); - if (version.onOrAfter(TransportVersions.V_7_7_0)) { - assertThat(ex.contextId().getSessionId(), equalTo(contextId.getSessionId())); - } else { - assertThat(ex.contextId().getSessionId(), equalTo("")); - } + assertThat(ex.contextId().getSessionId(), equalTo(contextId.getSessionId())); } public void testCircuitBreakingException() throws IOException { @@ -422,7 +408,7 @@ public void testConnectTransportException() throws IOException { } public void testSearchPhaseExecutionException() throws IOException { - ShardSearchFailure[] empty = new ShardSearchFailure[0]; + ShardSearchFailure[] empty = ShardSearchFailure.EMPTY_ARRAY; SearchPhaseExecutionException ex = serialize(new SearchPhaseExecutionException("boom", "baam", new NullPointerException(), empty)); assertEquals("boom", ex.getPhaseName()); assertEquals("baam", ex.getMessage()); From bba08fc97c2c7b783263b5cd6de2e75a8bf42871 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Mon, 4 Dec 2023 15:31:02 +0100 Subject: [PATCH 178/263] Renaming inference rescorer feature flag to learn to rank. 
(#102883) --- .../org/elasticsearch/test/cluster/FeatureFlag.java | 2 +- x-pack/plugin/ml/qa/basic-multi-node/build.gradle | 2 +- x-pack/plugin/ml/qa/ml-with-security/build.gradle | 4 ++-- .../org/elasticsearch/xpack/ml/MachineLearning.java | 9 ++++----- ...rerFeature.java => LearnToRankRescorerFeature.java} | 10 +++++----- .../org/elasticsearch/xpack/test/rest/XPackRestIT.java | 2 +- 6 files changed, 14 insertions(+), 15 deletions(-) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{InferenceRescorerFeature.java => LearnToRankRescorerFeature.java} (61%) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index b83cc7bba06e5..ff7195f9f5f37 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - INFERENCE_RESCORER("es.inference_rescorer_feature_flag_enabled=true", Version.fromString("8.10.0"), null), + LEARN_TO_RANK("es.learn_to_rank_feature_flag_enabled=true", Version.fromString("8.10.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index fca019a6fc689..bf6ab9ed7d77e 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -17,7 +17,7 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'slm.history_index_enabled', 'false' - requiresFeature 
'es.inference_rescorer_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index b28e6bec462b9..b8b706353d624 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -181,7 +181,7 @@ tasks.named("yamlRestTest").configure { 'ml/inference_crud/Test put nlp model config with vocabulary set', 'ml/inference_crud/Test put model model aliases with nlp model', 'ml/inference_processor/Test create processor with missing mandatory fields', - 'ml/inference_rescore/Test rescore with missing model', + 'ml/learn_to_rank_rescorer/Test rescore with missing model', 'ml/inference_stats_crud/Test get stats given missing trained model', 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with model snapshot id set', @@ -258,5 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' - requiresFeature 'es.inference_rescorer_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index db23e7796f862..d0f7302105768 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -324,8 +324,8 @@ import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import 
org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import org.elasticsearch.xpack.ml.inference.ltr.InferenceRescorerFeature; import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerBuilder; +import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerFeature; import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankService; import org.elasticsearch.xpack.ml.inference.modelsize.MlModelSizeNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -886,8 +886,7 @@ private static void reportClashingNodeAttribute(String attrName) { @Override public List> getRescorers() { - if (enabled && InferenceRescorerFeature.isEnabled()) { - // Inference rescorer requires access to the model loading service + if (enabled && LearnToRankRescorerFeature.isEnabled()) { return List.of( new RescorerSpec<>( LearnToRankRescorerBuilder.NAME, @@ -1798,7 +1797,7 @@ public List getNamedXContent() { ); namedXContent.addAll(new CorrelationNamedContentProvider().getNamedXContentParsers()); // LTR Combine with Inference named content provider when feature flag is removed - if (InferenceRescorerFeature.isEnabled()) { + if (LearnToRankRescorerFeature.isEnabled()) { namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); } return namedXContent; @@ -1886,7 +1885,7 @@ public List getNamedWriteables() { namedWriteables.addAll(new CorrelationNamedContentProvider().getNamedWriteables()); namedWriteables.addAll(new ChangePointNamedContentProvider().getNamedWriteables()); // LTR Combine with Inference named content provider when feature flag is removed - if (InferenceRescorerFeature.isEnabled()) { + if (LearnToRankRescorerFeature.isEnabled()) { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); } return namedWriteables; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java similarity index 61% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java index 8a26714c7c06b..18b2c6fe5ff3f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/InferenceRescorerFeature.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java @@ -10,19 +10,19 @@ import org.elasticsearch.common.util.FeatureFlag; /** - * Inference rescorer feature flag. When the feature is complete, this flag will be removed. + * Learn to rank feature flag. When the feature is complete, this flag will be removed. * * Upon removal, ensure transport serialization is all corrected for future BWC. 
* * See {@link LearnToRankRescorerBuilder} */ -public class InferenceRescorerFeature { +public class LearnToRankRescorerFeature { - private InferenceRescorerFeature() {} + private LearnToRankRescorerFeature() {} - private static final FeatureFlag INFERENCE_RESCORE_FEATURE_FLAG = new FeatureFlag("inference_rescorer"); + private static final FeatureFlag LEARN_TO_RANK = new FeatureFlag("learn_to_rank"); public static boolean isEnabled() { - return INFERENCE_RESCORE_FEATURE_FLAG.isEnabled(); + return LEARN_TO_RANK.isEnabled(); } } diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index a0e0fd621ba46..3fd8e952d626e 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,7 +43,7 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) - .feature(FeatureFlag.INFERENCE_RESCORER) + .feature(FeatureFlag.LEARN_TO_RANK) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) From 3493ce4ebe75d1c44bd0eb01cf68ca568bae8674 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 15:31:42 +0100 Subject: [PATCH 179/263] [Connector API] Implement update error action (#102841) --- .../api/connector.update_error.json | 39 ++++ .../entsearch/335_connector_update_error.yml | 60 ++++++ .../xpack/application/EnterpriseSearch.java | 5 + 
.../application/connector/Connector.java | 52 ++++- .../connector/ConnectorIndexService.java | 31 +++ .../RestUpdateConnectorErrorAction.java | 45 +++++ .../TransportUpdateConnectorErrorAction.java | 52 +++++ .../action/UpdateConnectorErrorAction.java | 186 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 43 ++++ ...ErrorActionRequestBWCSerializingTests.java | 50 +++++ ...rrorActionResponseBWCSerializingTests.java | 42 ++++ .../xpack/security/operator/Constants.java | 1 + 12 files changed, 600 insertions(+), 6 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json new file mode 100644 index 0000000000000..5d82a3729b501 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -0,0 +1,39 @@ +{ + "connector.update_error": { + "documentation": { + "url": 
"https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the error field in the connector document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_error", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "An object containing the connector's error.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml new file mode 100644 index 0000000000000..70021e3899525 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_error.yml @@ -0,0 +1,60 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Error": + - do: + connector.update_error: + connector_id: test-connector + body: + error: "some error" + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { error: "some error" } + +--- +"Update Connector Error - 404 when connector doesn't exist": + - do: + catch: "missing" + connector.update_error: + connector_id: test-non-existent-connector + body: + error: "some error" + +--- +"Update Connector Error - 400 status code when connector_id is empty": + - do: 
+ catch: "bad_request" + connector.update_error: + connector_id: "" + body: + error: "some error" + +--- +"Update Connector Error - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_error: + connector_id: test-connector + body: + error: + field_1: test + field_2: something diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 2a53a46760868..09b86988ffe81 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; @@ -59,11 +60,13 @@ import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -201,6 +204,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), @@ -267,6 +271,7 @@ public List getRestHandlers( new RestGetConnectorAction(), new RestListConnectorAction(), new RestPutConnectorAction(), + new RestUpdateConnectorErrorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorLastSyncStatsAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 45b906d815aee..d68cc9f7227bc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -200,14 +200,14 @@ public Connector(StreamInput in) throws IOException { public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); static final ParseField DESCRIPTION_FIELD = new ParseField("description"); - static final ParseField ERROR_FIELD = new ParseField("error"); + public static final ParseField ERROR_FIELD = new ParseField("error"); static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); public static final ParseField INDEX_NAME_FIELD = new ParseField("index_name"); static final ParseField IS_NATIVE_FIELD = new ParseField("is_native"); public static final ParseField LANGUAGE_FIELD = new ParseField("language"); public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); - static final ParseField NAME_FIELD = new ParseField("name"); + public static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField PIPELINE_FIELD = new ParseField("pipeline"); public static final ParseField SCHEDULING_FIELD = new ParseField("scheduling"); public static final ParseField SERVICE_TYPE_FIELD = new ParseField("service_type"); @@ -457,8 +457,28 @@ public String getConnectorId() { return connectorId; } - public ConnectorScheduling getScheduling() { - return scheduling; + public String getApiKeyId() { + return apiKeyId; + } + + public Map getConfiguration() { + return configuration; + } + + public Map getCustomScheduling() { + return customScheduling; + } + + public String 
getDescription() { + return description; + } + + public String getError() { + return error; + } + + public ConnectorFeatures getFeatures() { + return features; } public List getFiltering() { @@ -469,20 +489,40 @@ public String getIndexName() { return indexName; } + public boolean isNative() { + return isNative; + } + public String getLanguage() { return language; } + public String getName() { + return name; + } + public ConnectorIngestPipeline getPipeline() { return pipeline; } + public ConnectorScheduling getScheduling() { + return scheduling; + } + public String getServiceType() { return serviceType; } - public Map getConfiguration() { - return configuration; + public ConnectorStatus getStatus() { + return status; + } + + public Object getSyncCursor() { + return syncCursor; + } + + public boolean isSyncNow() { + return syncNow; } public ConnectorSyncInfo getSyncInfo() { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index d99ad28dc3970..744a4d2028990 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -323,6 +324,36 @@ public void 
updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } } + /** + * Updates the error property of a {@link Connector}. + * + * @param request The request for updating the connector's error. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + private static ConnectorIndexService.ConnectorResult mapSearchResponseToConnectorList(SearchResponse response) { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..ea8bd1b4ee50f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorErrorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_error_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_error")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorErrorAction.Request request = UpdateConnectorErrorAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorErrorAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorErrorAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..629fd14861cf6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorErrorAction extends HandledTransportAction< + UpdateConnectorErrorAction.Request, + UpdateConnectorErrorAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorErrorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorErrorAction.NAME, + transportService, + actionFilters, + UpdateConnectorErrorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorErrorAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorError(request, listener.map(r -> new UpdateConnectorErrorAction.Response(r.getResult()))); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java new file mode 100644 index 0000000000000..c9e48dac08cd5 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java @@ -0,0 +1,186 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class 
UpdateConnectorErrorAction extends ActionType { + + public static final UpdateConnectorErrorAction INSTANCE = new UpdateConnectorErrorAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_error"; + + public UpdateConnectorErrorAction() { + super(NAME, UpdateConnectorErrorAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + @Nullable + private final String error; + + public Request(String connectorId, String error) { + this.connectorId = connectorId; + this.error = error; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.error = in.readOptionalString(); + } + + public String getConnectorId() { + return connectorId; + } + + public String getError() { + return error; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_error_request", + false, + ((args, connectorId) -> new UpdateConnectorErrorAction.Request(connectorId, (String) args[0])) + ); + + static { + PARSER.declareStringOrNull(optionalConstructorArg(), Connector.ERROR_FIELD); + } + + public static UpdateConnectorErrorAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorErrorAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), 
e); + } + } + + public static UpdateConnectorErrorAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.ERROR_FIELD.getPreferredName(), error); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalString(error); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(error, request.error); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, error); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, 
that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index e155cdfefbfa1..0f2c6c3fa3e8e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; @@ -172,6 +173,23 @@ public void testUpdateConnectorScheduling() throws Exception { assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } + public void testUpdateConnectorError() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( + connector.getConnectorId(), + randomAlphaOfLengthBetween(5, 15) + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = 
awaitGetConnector(connector.getConnectorId()); + assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -399,4 +417,29 @@ public void onFailure(Exception e) { assertNotNull("Received null response from update scheduling request", resp.get()); return resp.get(); } + + private UpdateResponse awaitUpdateConnectorError(UpdateConnectorErrorAction.Request updatedError) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorError(updatedError, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update error request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update error request", resp.get()); + return resp.get(); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..94092cee61b40 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionRequestBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorErrorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorErrorAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorErrorAction.Request::new; + } + + @Override + protected UpdateConnectorErrorAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorErrorAction.Request(connectorId, randomAlphaOfLengthBetween(5, 15)); + } + + @Override + protected UpdateConnectorErrorAction.Request mutateInstance(UpdateConnectorErrorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorErrorAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorErrorAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorErrorAction.Request mutateInstanceForVersion( + UpdateConnectorErrorAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java new file mode 100644 index 
0000000000000..a39fcac3d2f04 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorErrorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorErrorAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorErrorAction.Response::new; + } + + @Override + protected UpdateConnectorErrorAction.Response createTestInstance() { + return new UpdateConnectorErrorAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorErrorAction.Response mutateInstance(UpdateConnectorErrorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorErrorAction.Response mutateInstanceForVersion( + UpdateConnectorErrorAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 86640e2e1a784..ffc894af423cf 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,6 +127,7 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/update_error", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_last_sync_stats", From fd1e26a4bb3ba4a466fa614f200aa5a57b32b1d4 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Dec 2023 15:37:52 +0100 Subject: [PATCH 180/263] [Enterprise Search] Add GET connector sync job by id (#102908) Add GET connector sync job by id. --- .../api/connector_sync_job.get.json | 32 +++ .../entsearch/440_connector_sync_job_get.yml | 36 +++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 271 ++++++++++++++++-- .../syncjob/ConnectorSyncJobIndexService.java | 36 ++- .../action/GetConnectorSyncJobAction.java | 153 ++++++++++ .../action/RestGetConnectorSyncJobAction.java | 42 +++ .../TransportGetConnectorSyncJobAction.java | 55 ++++ .../ConnectorSyncJobIndexServiceTests.java | 112 +++++--- .../syncjob/ConnectorSyncJobTestUtils.java | 9 + .../syncjob/ConnectorSyncJobTests.java | 207 +++++++++++++ ...ncJobActionRequestBWCSerializingTests.java | 47 +++ ...cJobActionResponseBWCSerializingTests.java | 50 ++++ .../GetConnectorSyncJobActionTests.java | 36 +++ ...ansportGetConnectorSyncJobActionTests.java | 75 +++++ .../xpack/security/operator/Constants.java | 1 + 16 files changed, 1103 insertions(+), 64 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json create mode 100644 
x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json new file mode 100644 index 0000000000000..6eb461ad62128 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json @@ -0,0 +1,32 @@ +{ + "connector_sync_job.get": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Returns the details about a connector sync job." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}", + "methods": [ + "GET" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be returned." + } + } + } + ] + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml new file mode 100644 index 0000000000000..ade0736436e87 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/440_connector_sync_job_get.yml @@ -0,0 +1,36 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +'Get connector sync job': + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: access_control + trigger_method: scheduled + - set: { id: id } + - match: { id: $id } + - do: + connector_sync_job.get: + connector_sync_job_id: $id + - match: { job_type: access_control } + - match: { trigger_method: scheduled } + +--- +'Get connector sync job - Missing sync job id': + - do: + connector_sync_job.get: + connector_sync_job_id: non-existing-sync-job-id + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 09b86988ffe81..f93177666f3d8 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -75,14 +75,17 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -212,6 +215,7 @@ protected XPackLicenseState getLicenseState() { new 
ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), // SyncJob API + new ActionHandler<>(GetConnectorSyncJobAction.INSTANCE, TransportGetConnectorSyncJobAction.class), new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), @@ -279,6 +283,7 @@ public List getRestHandlers( new RestUpdateConnectorSchedulingAction(), // SyncJob API + new RestGetConnectorSyncJobAction(), new RestPostConnectorSyncJobAction(), new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 6c0e9635d986d..2a302ddb68199 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -7,22 +7,36 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import 
org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorFiltering; +import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import java.io.IOException; import java.time.Instant; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Represents a sync job in the Elasticsearch ecosystem. Sync jobs refer to a unit of work, which syncs data from a 3rd party * data source into an Elasticsearch index using the Connectors service. 
A ConnectorSyncJob always refers @@ -60,7 +74,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); - static final ParseField DELETED_DOCUMENT_COUNT = new ParseField("deleted_document_count"); + static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); static final ParseField ERROR_FIELD = new ParseField("error"); @@ -92,6 +106,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ConnectorSyncJobTriggerMethod DEFAULT_TRIGGER_METHOD = ConnectorSyncJobTriggerMethod.ON_DEMAND; + @Nullable private final Instant cancelationRequestedAt; @Nullable @@ -127,7 +142,6 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { private final ConnectorSyncStatus status; - @Nullable private final long totalDocumentCount; private final ConnectorSyncJobTriggerMethod triggerMethod; @@ -217,44 +231,269 @@ public ConnectorSyncJob(StreamInput in) throws IOException { this.workerHostname = in.readOptionalString(); } + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_sync_job", + true, + (args) -> { + int i = 0; + return new Builder().setCancellationRequestedAt((Instant) args[i++]) + .setCanceledAt((Instant) args[i++]) + .setCompletedAt((Instant) args[i++]) + .setConnector((Connector) args[i++]) + .setCreatedAt((Instant) args[i++]) + .setDeletedDocumentCount((Long) args[i++]) + .setError((String) args[i++]) + .setId((String) args[i++]) + .setIndexedDocumentCount((Long) args[i++]) + .setIndexedDocumentVolume((Long) args[i++]) + .setJobType((ConnectorSyncJobType) args[i++]) + .setLastSeen((Instant) args[i++]) + .setMetadata((Map) args[i++]) + .setStartedAt((Instant) args[i++]) + .setStatus((ConnectorSyncStatus) args[i++]) + .setTotalDocumentCount((Long) args[i++]) + .setTriggerMethod((ConnectorSyncJobTriggerMethod) args[i++]) 
+ .setWorkerHostname((String) args[i]) + .build(); + } + ); + + static { + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + CANCELATION_REQUESTED_AT_FIELD, + ObjectParser.ValueType.STRING_OR_NULL + ); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), CANCELED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), COMPLETED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJob.syncJobConnectorFromXContent(p), + CONNECTOR_FIELD, + ObjectParser.ValueType.OBJECT + ); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), CREATED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareLong(constructorArg(), DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), ERROR_FIELD); + PARSER.declareString(constructorArg(), ID_FIELD); + PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), INDEXED_DOCUMENT_VOLUME_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJobType.fromString(p.text()), + JOB_TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareField(constructorArg(), (p, c) -> Instant.parse(p.text()), LAST_SEEN_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> p.map(), METADATA_FIELD, ObjectParser.ValueType.OBJECT); + PARSER.declareField(optionalConstructorArg(), (p, c) -> Instant.parse(p.text()), STARTED_AT_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncStatus.fromString(p.text()), + STATUS_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareLong(constructorArg(), TOTAL_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConnectorSyncJobTriggerMethod.fromString(p.text()), + TRIGGER_METHOD_FIELD, + 
ObjectParser.ValueType.STRING + ); + PARSER.declareString(optionalConstructorArg(), WORKER_HOSTNAME_FIELD); + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser SYNC_JOB_CONNECTOR_PARSER = new ConstructingObjectParser<>( + "sync_job_connector", + true, + (args) -> { + int i = 0; + return new Connector.Builder().setConnectorId((String) args[i++]) + .setFiltering((List) args[i++]) + .setIndexName((String) args[i++]) + .setLanguage((String) args[i++]) + .setPipeline((ConnectorIngestPipeline) args[i++]) + .setServiceType((String) args[i++]) + .setConfiguration((Map) args[i++]) + .build(); + } + ); + + static { + SYNC_JOB_CONNECTOR_PARSER.declareString(constructorArg(), Connector.ID_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareObjectArray( + optionalConstructorArg(), + (p, c) -> ConnectorFiltering.fromXContent(p), + Connector.FILTERING_FIELD + ); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.INDEX_NAME_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.LANGUAGE_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareField( + optionalConstructorArg(), + (p, c) -> ConnectorIngestPipeline.fromXContent(p), + Connector.PIPELINE_FIELD, + ObjectParser.ValueType.OBJECT + ); + SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.SERVICE_TYPE_FIELD); + SYNC_JOB_CONNECTOR_PARSER.declareField( + optionalConstructorArg(), + (parser, context) -> parser.map(), + Connector.CONFIGURATION_FIELD, + ObjectParser.ValueType.OBJECT + ); + } + + public static ConnectorSyncJob fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorSyncJob.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector sync job document.", e); + } + } + + public static ConnectorSyncJob 
fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static Connector syncJobConnectorFromXContent(XContentParser parser) throws IOException { + return SYNC_JOB_CONNECTOR_PARSER.parse(parser, null); + } + public String getId() { return id; } + public Instant getCancelationRequestedAt() { + return cancelationRequestedAt; + } + + public Instant getCanceledAt() { + return canceledAt; + } + + public Instant getCompletedAt() { + return completedAt; + } + + public Connector getConnector() { + return connector; + } + + public Instant getCreatedAt() { + return createdAt; + } + + public long getDeletedDocumentCount() { + return deletedDocumentCount; + } + + public String getError() { + return error; + } + + public long getIndexedDocumentCount() { + return indexedDocumentCount; + } + + public long getIndexedDocumentVolume() { + return indexedDocumentVolume; + } + + public ConnectorSyncJobType getJobType() { + return jobType; + } + + public Instant getLastSeen() { + return lastSeen; + } + + public Map getMetadata() { + return metadata; + } + + public Instant getStartedAt() { + return startedAt; + } + + public ConnectorSyncStatus getStatus() { + return status; + } + + public long getTotalDocumentCount() { + return totalDocumentCount; + } + + public ConnectorSyncJobTriggerMethod getTriggerMethod() { + return triggerMethod; + } + + public String getWorkerHostname() { + return workerHostname; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); - builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); - builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + if (cancelationRequestedAt != null) { + builder.field(CANCELATION_REQUESTED_AT_FIELD.getPreferredName(), cancelationRequestedAt); + } + if (canceledAt != null) { + 
builder.field(CANCELED_AT_FIELD.getPreferredName(), canceledAt); + } + if (completedAt != null) { + builder.field(COMPLETED_AT_FIELD.getPreferredName(), completedAt); + } builder.startObject(CONNECTOR_FIELD.getPreferredName()); { builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId()); - builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); - builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); - builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); - builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); - builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); - builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); + if (connector.getFiltering() != null) { + builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering()); + } + if (connector.getIndexName() != null) { + builder.field(Connector.INDEX_NAME_FIELD.getPreferredName(), connector.getIndexName()); + } + if (connector.getLanguage() != null) { + builder.field(Connector.LANGUAGE_FIELD.getPreferredName(), connector.getLanguage()); + } + if (connector.getPipeline() != null) { + builder.field(Connector.PIPELINE_FIELD.getPreferredName(), connector.getPipeline()); + } + if (connector.getServiceType() != null) { + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connector.getServiceType()); + } + if (connector.getConfiguration() != null) { + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), connector.getConfiguration()); + } } builder.endObject(); builder.field(CREATED_AT_FIELD.getPreferredName(), createdAt); - builder.field(DELETED_DOCUMENT_COUNT.getPreferredName(), deletedDocumentCount); - builder.field(ERROR_FIELD.getPreferredName(), error); + builder.field(DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); + if 
(error != null) { + builder.field(ERROR_FIELD.getPreferredName(), error); + } builder.field(ID_FIELD.getPreferredName(), id); builder.field(INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); builder.field(INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); builder.field(JOB_TYPE_FIELD.getPreferredName(), jobType); - builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + if (lastSeen != null) { + builder.field(LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } builder.field(METADATA_FIELD.getPreferredName(), metadata); - builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + if (startedAt != null) { + builder.field(STARTED_AT_FIELD.getPreferredName(), startedAt); + } builder.field(STATUS_FIELD.getPreferredName(), status); builder.field(TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); builder.field(TRIGGER_METHOD_FIELD.getPreferredName(), triggerMethod); - builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + if (workerHostname != null) { + builder.field(WORKER_HOSTNAME_FIELD.getPreferredName(), workerHostname); + } } builder.endObject(); return builder; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index ab593fe99fcee..5e1686dde80f2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; @@ -174,6 +175,40 @@ public void checkInConnectorSyncJob(String connectorSyncJobId, ActionListener listener) { + final GetRequest getRequest = new GetRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).id(connectorSyncJobId).realtime(true); + + try { + clientWithOrigin.get( + getRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, getResponse) -> { + if (getResponse.isExists() == false) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + + try { + final ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes( + getResponse.getSourceAsBytesRef(), + XContentType.JSON + ); + l.onResponse(syncJob); + } catch (Exception e) { + listener.onFailure(e); + } + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Cancels the {@link ConnectorSyncJob} in the underlying index. * Canceling means to set the {@link ConnectorSyncStatus} to "canceling" and not "canceled" as this is an async operation. 
@@ -211,7 +246,6 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener { + + public static final GetConnectorSyncJobAction INSTANCE = new GetConnectorSyncJobAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/get"; + + private GetConnectorSyncJobAction() { + super(NAME, GetConnectorSyncJobAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + private final String connectorSyncJobId; + + private static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + } + + public Request(String connectorSyncJobId) { + this.connectorSyncJobId = connectorSyncJobId; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError( + ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, + validationException + ); + } + + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorSyncJobId); + builder.endObject(); + return builder; + } + + 
private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_connector_sync_job_request", + false, + (args) -> new Request((String) args[0]) + ); + + static { + PARSER.declareString(constructorArg(), CONNECTOR_ID_FIELD); + } + + public static Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + private final ConnectorSyncJob connectorSyncJob; + + public Response(ConnectorSyncJob connectorSyncJob) { + this.connectorSyncJob = connectorSyncJob; + } + + public Response(StreamInput in) throws IOException { + super(in); + this.connectorSyncJob = new ConnectorSyncJob(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + connectorSyncJob.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return connectorSyncJob.toXContent(builder, params); + } + + public static GetConnectorSyncJobAction.Response fromXContent(XContentParser parser) throws IOException { + return new GetConnectorSyncJobAction.Response(ConnectorSyncJob.fromXContent(parser)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(connectorSyncJob, response.connectorSyncJob); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJob); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java new file mode 100644 index 0000000000000..1f5606810757e --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestGetConnectorSyncJobAction.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestGetConnectorSyncJobAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_job_get_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.GET, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + GetConnectorSyncJobAction.Request request = new GetConnectorSyncJobAction.Request(restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM)); + return restChannel -> client.execute(GetConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java 
new file mode 100644 index 0000000000000..1024b9953fd09 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportGetConnectorSyncJobAction extends HandledTransportAction< + GetConnectorSyncJobAction.Request, + GetConnectorSyncJobAction.Response> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportGetConnectorSyncJobAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + GetConnectorSyncJobAction.NAME, + transportService, + actionFilters, + GetConnectorSyncJobAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + GetConnectorSyncJobAction.Request request, + ActionListener listener + ) { + connectorSyncJobIndexService.getConnectorSyncJob( + 
request.getConnectorSyncJobId(), + listener.map(GetConnectorSyncJobAction.Response::new) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index cadc8b761cbe3..8613078e3074e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -80,46 +80,21 @@ public void testCreateConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() ); - PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - - String id = (String) connectorSyncJobSource.get(ConnectorSyncJob.ID_FIELD.getPreferredName()); - ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); - ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) - ); - ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); - ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) - ); - - ConnectorSyncStatus initialStatus = ConnectorSyncStatus.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) - ); - - Instant createdNow = Instant.parse((String) 
connectorSyncJobSource.get(ConnectorSyncJob.CREATED_AT_FIELD.getPreferredName())); - Instant lastSeen = Instant.parse((String) connectorSyncJobSource.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName())); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Integer totalDocumentCount = (Integer) connectorSyncJobSource.get(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName()); - Integer indexedDocumentCount = (Integer) connectorSyncJobSource.get( - ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() - ); - Integer indexedDocumentVolume = (Integer) connectorSyncJobSource.get( - ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() - ); - Integer deletedDocumentCount = (Integer) connectorSyncJobSource.get(ConnectorSyncJob.DELETED_DOCUMENT_COUNT.getPreferredName()); - - assertThat(id, notNullValue()); - assertThat(jobType, equalTo(requestJobType)); - assertThat(triggerMethod, equalTo(requestTriggerMethod)); - assertThat(initialStatus, equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); - assertThat(createdNow, equalTo(lastSeen)); - assertThat(totalDocumentCount, equalTo(0)); - assertThat(indexedDocumentCount, equalTo(0)); - assertThat(indexedDocumentVolume, equalTo(0)); - assertThat(deletedDocumentCount, equalTo(0)); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); + + assertThat(connectorSyncJob.getId(), notNullValue()); + assertThat(connectorSyncJob.getJobType(), equalTo(requestJobType)); + assertThat(connectorSyncJob.getTriggerMethod(), equalTo(requestTriggerMethod)); + assertThat(connectorSyncJob.getStatus(), equalTo(ConnectorSyncJob.DEFAULT_INITIAL_STATUS)); + assertThat(connectorSyncJob.getCreatedAt(), equalTo(connectorSyncJob.getLastSeen())); + assertThat(connectorSyncJob.getTotalDocumentCount(), equalTo(0L)); + assertThat(connectorSyncJob.getIndexedDocumentCount(), equalTo(0L)); + assertThat(connectorSyncJob.getIndexedDocumentVolume(), equalTo(0L)); + 
assertThat(connectorSyncJob.getDeletedDocumentCount(), equalTo(0L)); } public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { @@ -130,12 +105,9 @@ public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeTo ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - ConnectorSyncJobType jobType = ConnectorSyncJobType.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.JOB_TYPE_FIELD.getPreferredName()) - ); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); - assertThat(jobType, equalTo(ConnectorSyncJob.DEFAULT_JOB_TYPE)); + assertThat(connectorSyncJob.getJobType(), equalTo(ConnectorSyncJob.DEFAULT_JOB_TYPE)); } public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { @@ -146,12 +118,9 @@ public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTri ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); - Map connectorSyncJobSource = getConnectorSyncJobSourceById(response.getId()); - ConnectorSyncJobTriggerMethod triggerMethod = ConnectorSyncJobTriggerMethod.fromString( - (String) connectorSyncJobSource.get(ConnectorSyncJob.TRIGGER_METHOD_FIELD.getPreferredName()) - ); + ConnectorSyncJob connectorSyncJob = awaitGetConnectorSyncJob(response.getId()); - assertThat(triggerMethod, equalTo(ConnectorSyncJob.DEFAULT_TRIGGER_METHOD)); + assertThat(connectorSyncJob.getTriggerMethod(), equalTo(ConnectorSyncJob.DEFAULT_TRIGGER_METHOD)); } public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() throws Exception { @@ -184,6 +153,28 @@ public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> 
awaitDeleteConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testGetConnectorSyncJob() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + ConnectorSyncJobType jobType = syncJobRequest.getJobType(); + ConnectorSyncJobTriggerMethod triggerMethod = syncJobRequest.getTriggerMethod(); + + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + ConnectorSyncJob syncJob = awaitGetConnectorSyncJob(syncJobId); + + assertThat(syncJob.getId(), equalTo(syncJobId)); + assertThat(syncJob.getJobType(), equalTo(jobType)); + assertThat(syncJob.getTriggerMethod(), equalTo(triggerMethod)); + assertThat(syncJob.getConnector().getConnectorId(), equalTo(connector.getConnectorId())); + } + + public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { + expectThrows(ResourceNotFoundException.class, () -> awaitGetConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); + } + public void testCheckInConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() @@ -346,6 +337,33 @@ private Map getConnectorSyncJobSourceById(String syncJobId) thro return getResponseActionFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).getSource(); } + private ConnectorSyncJob awaitGetConnectorSyncJob(String connectorSyncJobId) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSyncJobIndexService.getConnectorSyncJob(connectorSyncJobId, new ActionListener() { + @Override + public void onResponse(ConnectorSyncJob connectorSyncJob) { + resp.set(connectorSyncJob); + latch.countDown(); + } + + @Override + public void 
onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for get request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from get request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitCheckInConnectorSyncJob(String connectorSyncJobId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 4fa1b9122284d..9ec404e109496 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import java.time.Instant; @@ -100,4 +101,12 @@ public static CancelConnectorSyncJobAction.Request getRandomCancelConnectorSyncJ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyncJobActionRequest() { return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + + public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequest() { + return 
new GetConnectorSyncJobAction.Request(randomAlphaOfLength(10)); + } + + public static GetConnectorSyncJobAction.Response getRandomGetConnectorSyncJobResponse() { + return new GetConnectorSyncJobAction.Response(getRandomConnectorSyncJob()); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index aeecf582c9ec7..ace1138b8e987 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -7,15 +7,23 @@ package org.elasticsearch.xpack.application.connector.syncjob; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.junit.Before; import java.io.IOException; +import java.time.Instant; import java.util.List; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; public class ConnectorSyncJobTests extends ESTestCase { @@ -35,6 +43,205 @@ public final void testRandomSerialization() throws IOException { } } + public void testFromXContent_WithAllFields_AllSet() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "cancelation_requested_at": "2023-12-01T14:19:39.394194Z", + "canceled_at": "2023-12-01T14:19:39.394194Z", + "completed_at": 
"2023-12-01T14:19:39.394194Z", + "connector": { + "connector_id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "error": "some-error", + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "started_at": "2023-12-01T14:18:43.07693Z", + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled", + "worker_hostname": "worker-hostname" + } + """); + + ConnectorSyncJob syncJob = ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + + assertThat(syncJob.getCancelationRequestedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + 
assertThat(syncJob.getCanceledAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + assertThat(syncJob.getCompletedAt(), equalTo(Instant.parse("2023-12-01T14:19:39.394194Z"))); + + assertThat(syncJob.getConnector().getConnectorId(), equalTo("connector-id")); + assertThat(syncJob.getConnector().getFiltering(), hasSize(greaterThan(0))); + assertThat(syncJob.getConnector().getIndexName(), equalTo("search-connector")); + assertThat(syncJob.getConnector().getLanguage(), equalTo("english")); + assertThat(syncJob.getConnector().getPipeline(), notNullValue()); + + assertThat(syncJob.getCreatedAt(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getDeletedDocumentCount(), equalTo(10L)); + assertThat(syncJob.getError(), equalTo("some-error")); + assertThat(syncJob.getId(), equalTo("HIC-JYwB9RqKhB7x_hIE")); + assertThat(syncJob.getIndexedDocumentCount(), equalTo(10L)); + assertThat(syncJob.getIndexedDocumentVolume(), equalTo(10L)); + assertThat(syncJob.getJobType(), equalTo(ConnectorSyncJobType.FULL)); + assertThat(syncJob.getLastSeen(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getMetadata(), notNullValue()); + assertThat(syncJob.getStartedAt(), equalTo(Instant.parse("2023-12-01T14:18:43.07693Z"))); + assertThat(syncJob.getStatus(), equalTo(ConnectorSyncStatus.CANCELING)); + assertThat(syncJob.getTotalDocumentCount(), equalTo(0L)); + assertThat(syncJob.getTriggerMethod(), equalTo(ConnectorSyncJobTriggerMethod.SCHEDULED)); + assertThat(syncJob.getWorkerHostname(), equalTo("worker-hostname")); + } + + public void testFromXContent_WithAllNonOptionalFieldsSet_DoesNotThrow() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "connector": { + "connector_id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": 
"2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + }, + "created_at": "2023-12-01T14:18:43.07693Z", + "deleted_document_count": 10, + "id": "HIC-JYwB9RqKhB7x_hIE", + "indexed_document_count": 10, + "indexed_document_volume": 10, + "job_type": "full", + "last_seen": "2023-12-01T14:18:43.07693Z", + "metadata": {}, + "status": "canceling", + "total_document_count": 0, + "trigger_method": "scheduled" + } + """); + + ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); + } + private void assertTransportSerialization(ConnectorSyncJob testInstance) throws IOException { ConnectorSyncJob deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java new 
file mode 100644 index 0000000000000..c0b7711474a0b --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionRequestBWCSerializingTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class GetConnectorSyncJobActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + GetConnectorSyncJobAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSyncJobAction.Request::new; + } + + @Override + protected GetConnectorSyncJobAction.Request createTestInstance() { + return ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + } + + @Override + protected GetConnectorSyncJobAction.Request mutateInstance(GetConnectorSyncJobAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSyncJobAction.Request doParseInstance(XContentParser parser) throws IOException { + return GetConnectorSyncJobAction.Request.parse(parser); + } + + @Override + protected GetConnectorSyncJobAction.Request mutateInstanceForVersion( + GetConnectorSyncJobAction.Request instance, + TransportVersion version + ) { + return new GetConnectorSyncJobAction.Request(instance.getConnectorSyncJobId()); + } +} diff 
--git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..00f6e7cf57fc1 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionResponseBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class GetConnectorSyncJobActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + GetConnectorSyncJobAction.Response> { + + @Override + public NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return GetConnectorSyncJobAction.Response::new; + } + + @Override + protected GetConnectorSyncJobAction.Response createTestInstance() { + return 
ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobResponse(); + } + + @Override + protected GetConnectorSyncJobAction.Response mutateInstance(GetConnectorSyncJobAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected GetConnectorSyncJobAction.Response mutateInstanceForVersion( + GetConnectorSyncJobAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..807f02124f32a --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/GetConnectorSyncJobActionTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class GetConnectorSyncJobActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdIsPresent_ExpectNoValidationError() { + GetConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + GetConnectorSyncJobAction.Request requestWithMissingConnectorId = new GetConnectorSyncJobAction.Request(""); + ActionRequestValidationException exception = requestWithMissingConnectorId.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java new file mode 100644 index 0000000000000..7b83d008d92bc --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportGetConnectorSyncJobActionTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportGetConnectorSyncJobActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportGetConnectorSyncJobAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportGetConnectorSyncJobAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testGetConnectorSyncJob_ExpectNoWarnings() throws 
InterruptedException { + GetConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomGetConnectorSyncJobRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(GetConnectorSyncJobAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for get request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ffc894af423cf..3409f549cb579 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -136,6 +136,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/check_in", + "cluster:admin/xpack/connector/sync_job/get", "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", From af30fe437ba6ba2f3540aa12249220c9d43cbdfb Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 4 Dec 2023 14:39:19 +0000 Subject: [PATCH 181/263] Check for null before overriding task settings (#102918) --- .../embeddings/OpenAiEmbeddingsModel.java | 5 ++++- .../OpenAiEmbeddingsModelTests.java | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 5e2c352d88a01..02c1e41e0374a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -84,8 +84,11 @@ public ExecutableAction accept(OpenAiActionVisitor creator, Map } public OpenAiEmbeddingsModel overrideWith(Map taskSettings) { - var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); + if (taskSettings == null || taskSettings.isEmpty()) { + return this; + } + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap(taskSettings); return new OpenAiEmbeddingsModel(this, getTaskSettings().overrideWith(requestTaskSettings)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 96ced66723f04..62cb609a59d2a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -14,8 +14,11 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import java.util.Map; + import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.getRequestTaskSettingsMap; 
import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; public class OpenAiEmbeddingsModelTests extends ESTestCase { @@ -28,6 +31,22 @@ public void testOverrideWith_OverridesUser() { assertThat(overriddenModel, is(createModel("url", "org", "api_key", "model_name", "user_override"))); } + public void testOverrideWith_EmptyMap() { + var model = createModel("url", "org", "api_key", "model_name", null); + + var requestTaskSettingsMap = Map.of(); + + var overriddenModel = model.overrideWith(requestTaskSettingsMap); + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap() { + var model = createModel("url", "org", "api_key", "model_name", null); + + var overriddenModel = model.overrideWith(null); + assertThat(overriddenModel, sameInstance(model)); + } + public static OpenAiEmbeddingsModel createModel( String url, @Nullable String org, From 2eff970cdb6c066b4b8b25f8694b8b40a124c580 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Mon, 4 Dec 2023 16:38:53 +0100 Subject: [PATCH 182/263] [Profiling] Improve tests and fix arm64 handling (#102859) Co-authored-by: Elastic Machine --- .../xpack/profiling/GetStackTracesRequest.java | 2 +- .../xpack/profiling/GetStackTracesRequestTests.java | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index a6680f08f4684..3ab797e4b16ad 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -89,7 +89,7 @@ public GetStackTracesRequest( this.customCO2PerKWH = customCO2PerKWH; this.customDatacenterPUE = customDatacenterPUE; this.customPerCoreWattX86 = customPerCoreWattX86; - 
this.customPerCoreWattARM64 = customPerCoreWattX86; + this.customPerCoreWattARM64 = customPerCoreWattARM64; this.customCostPerCoreHour = customCostPerCoreHour; } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index bb4973e75eec8..5b6befbe5a2c2 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -57,7 +57,13 @@ public void testSerialization() throws IOException { try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { GetStackTracesRequest deserialized = new GetStackTracesRequest(in); assertEquals(sampleSize, deserialized.getSampleSize()); + assertEquals(requestedDuration, deserialized.getRequestedDuration()); assertEquals(awsCostFactor, deserialized.getAwsCostFactor()); + assertEquals(customCO2PerKWH, deserialized.getCustomCO2PerKWH()); + assertEquals(datacenterPUE, deserialized.getCustomDatacenterPUE()); + assertEquals(perCoreWattX86, deserialized.getCustomPerCoreWattX86()); + assertEquals(perCoreWattARM64, deserialized.getCustomPerCoreWattARM64()); + assertEquals(customCostPerCoreHour, deserialized.getCustomCostPerCoreHour()); assertEquals(query, deserialized.getQuery()); } } From aaadd11b2da7aa7c4f3ca092f42cb8576f63b56f Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 4 Dec 2023 17:19:00 +0100 Subject: [PATCH 183/263] Fix CI error in DenseVectorFieldMapperTests (#102914) --- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index eca673a6dcf24..c417ec995a20a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -790,7 +790,6 @@ public void testMaxDimsByteVector() throws IOException { assertArrayEquals("Parsed vector is not equal to original.", vector, vectorField.vectorValue()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102899") public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, @@ -799,7 +798,11 @@ public void testVectorSimilarity() { assertEquals( VectorSimilarityFunction.COSINE, VectorSimilarity.COSINE.vectorSimilarityFunction( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, DenseVectorFieldMapper.NORMALIZE_COSINE), + IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.V_8_0_0, + IndexVersionUtils.getPreviousVersion(DenseVectorFieldMapper.NORMALIZE_COSINE) + ), ElementType.FLOAT ) ); From e173b2e6a11cbe83c600e8f90764e5af5f194eda Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 4 Dec 2023 17:43:45 +0100 Subject: [PATCH 184/263] Introduce a StreamOutput that counts how many bytes are written to the stream (#102906) Currently, the `DelayableWriteable` defines a `CountingStreamOutput` that is private to that class. The spatial module would benefit to use a similar implementation to write doc values. Therefore in this PR we propose to move this class to be a first class implementation of the StreamOutput family. This new implementation specialises a few more methods so it should perform better and it is fully tested. 
--- docs/changelog/102906.yaml | 6 + .../common/geo/SimpleFeatureFactory.java | 9 +- .../io/stream/CountingStreamOutput.java | 88 +++++++++ .../common/io/stream/DelayableWriteable.java | 22 +-- .../common/io/stream/BytesStreamsTests.java | 179 ++++++++++++++---- .../index/fielddata/TriangleTreeWriter.java | 80 ++++---- 6 files changed, 280 insertions(+), 104 deletions(-) create mode 100644 docs/changelog/102906.yaml create mode 100644 server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java diff --git a/docs/changelog/102906.yaml b/docs/changelog/102906.yaml new file mode 100644 index 0000000000000..3efaa2db58390 --- /dev/null +++ b/docs/changelog/102906.yaml @@ -0,0 +1,6 @@ +pr: 102906 +summary: Introduce a `StreamOutput` that counts how many bytes are written to the + stream +area: Distributed +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java b/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java index aafef3f04ebc0..a5a8c2d4ed736 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java +++ b/server/src/main/java/org/elasticsearch/common/geo/SimpleFeatureFactory.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BitUtil; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.CountingStreamOutput; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; @@ -178,16 +179,14 @@ private static int encodeCommand(int id, int length) { } private static byte[] writeCommands(final int[] commands, final int type, final int length) throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { + try (BytesStreamOutput output = new BytesStreamOutput(); CountingStreamOutput counting = new CountingStreamOutput()) { for (int i = 0; i < length; i++) { - output.writeVInt(commands[i]); + 
counting.writeVInt(commands[i]); } - final int dataSize = output.size(); - output.reset(); output.writeVInt(24); output.writeVInt(type); output.writeVInt(34); - output.writeVInt(dataSize); + output.writeVInt(Math.toIntExact(counting.size())); for (int i = 0; i < length; i++) { output.writeVInt(commands[i]); } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java new file mode 100644 index 0000000000000..cc4416444c9ee --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/io/stream/CountingStreamOutput.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.io.stream; + +import java.io.IOException; + +/** + * A reusable @link {@link StreamOutput} that just count how many bytes are written. 
+ */ +public class CountingStreamOutput extends StreamOutput { + private long size; + + /** reset the written byes to 0 */ + public void reset() { + size = 0L; + } + + /** returns how many bytes would have been written */ + public long size() { + return size; + } + + @Override + public void writeByte(byte b) { + ++size; + } + + @Override + public void writeBytes(byte[] b, int offset, int length) { + size += length; + } + + @Override + public void writeInt(int i) { + size += Integer.BYTES; + } + + @Override + public void writeIntArray(int[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Integer.BYTES; + } + + @Override + public void writeLong(long i) { + size += Long.BYTES; + } + + @Override + public void writeLongArray(long[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Long.BYTES; + } + + @Override + public void writeFloat(float v) { + size += Float.BYTES; + } + + @Override + public void writeFloatArray(float[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Float.BYTES; + } + + @Override + public void writeDouble(double v) { + size += Double.BYTES; + } + + @Override + public void writeDoubleArray(double[] values) throws IOException { + writeVInt(values.length); + size += (long) values.length * Double.BYTES; + } + + @Override + public void flush() {} + + @Override + public void close() {} +} diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java index d24a42ef3fcef..4b3683edf7307 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java @@ -216,7 +216,7 @@ public static long getSerializedSize(Writeable ref) { try (CountingStreamOutput out = new CountingStreamOutput()) { 
out.setTransportVersion(TransportVersion.current()); ref.writeTo(out); - return out.size; + return out.size(); } catch (IOException exc) { throw new UncheckedIOException(exc); } @@ -237,24 +237,4 @@ private static T deserialize( return reader.read(in); } } - - private static class CountingStreamOutput extends StreamOutput { - long size = 0; - - @Override - public void writeByte(byte b) throws IOException { - ++size; - } - - @Override - public void writeBytes(byte[] b, int offset, int length) throws IOException { - size += length; - } - - @Override - public void flush() throws IOException {} - - @Override - public void close() throws IOException {} - } } diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 92e8abbe83e16..0e54a9a49aa00 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -53,7 +53,7 @@ */ public class BytesStreamsTests extends ESTestCase { public void testEmpty() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // test empty stream to array assertEquals(0, out.size()); @@ -63,7 +63,7 @@ public void testEmpty() throws Exception { } public void testSingleByte() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); assertEquals(0, out.size()); int expectedSize = 1; @@ -78,7 +78,7 @@ public void testSingleByte() throws Exception { } public void testSingleShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = 10; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -95,7 +95,7 @@ public void testSingleShortPage() throws Exception { } public void 
testIllegalBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // bulk-write with wrong args expectThrows(IndexOutOfBoundsException.class, () -> out.writeBytes(new byte[] {}, 0, 1)); @@ -103,7 +103,7 @@ public void testIllegalBulkWrite() throws Exception { } public void testSingleShortPageBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); // first bulk-write empty array: should not change anything int expectedSize = 0; @@ -123,7 +123,7 @@ public void testSingleShortPageBulkWrite() throws Exception { } public void testSingleFullPageBulkWrite() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -138,7 +138,7 @@ public void testSingleFullPageBulkWrite() throws Exception { } public void testSingleFullPageBulkWriteWithOffset() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int initialOffset = 10; int additionalLength = PageCacheRecycler.BYTE_PAGE_SIZE; @@ -157,7 +157,7 @@ public void testSingleFullPageBulkWriteWithOffset() throws Exception { } public void testSingleFullPageBulkWriteWithOffsetCrossover() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int initialOffset = 10; int additionalLength = PageCacheRecycler.BYTE_PAGE_SIZE * 2; @@ -176,7 +176,7 @@ public void testSingleFullPageBulkWriteWithOffsetCrossover() throws Exception { } public void testSingleFullPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE; byte[] expectedData = 
randomizedByteArrayWithSize(expectedSize); @@ -193,7 +193,7 @@ public void testSingleFullPage() throws Exception { } public void testOneFullOneShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = PageCacheRecycler.BYTE_PAGE_SIZE + 10; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -210,7 +210,7 @@ public void testOneFullOneShortPage() throws Exception { } public void testTwoFullOneShortPage() throws Exception { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); int expectedSize = (PageCacheRecycler.BYTE_PAGE_SIZE * 2) + 1; byte[] expectedData = randomizedByteArrayWithSize(expectedSize); @@ -263,7 +263,7 @@ public void testSkip() throws Exception { public void testSimpleStreams() throws Exception { assumeTrue("requires a 64-bit JRE ... ?!", Constants.JRE_IS_64BIT); - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeBoolean(false); out.writeByte((byte) 1); out.writeShort((short) -1); @@ -351,7 +351,7 @@ public String toString() { } public void testNamedWriteable() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new) @@ -385,7 +385,7 @@ public void testNamedWriteableList() throws IOException { expected.add(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); } - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { out.writeNamedWriteableCollection(expected); try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), 
namedWriteableRegistry)) { assertEquals(expected, in.readNamedWriteableCollectionAsList(BaseNamedWriteable.class)); @@ -395,7 +395,7 @@ public void testNamedWriteableList() throws IOException { } public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2"); out.writeNamedWriteable(testNamedWriteable); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); @@ -405,7 +405,7 @@ public void testNamedWriteableNotSupportedWithoutWrapping() throws IOException { } public void testNamedWriteableReaderReturnsNull() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null) @@ -426,7 +426,7 @@ public void testNamedWriteableReaderReturnsNull() throws IOException { } public void testOptionalWriteableReaderReturnsNull() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { out.writeOptionalWriteable(new TestNamedWriteable(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10))); StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); AssertionError e = expectThrows(AssertionError.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null)); @@ -435,7 +435,7 @@ public void testOptionalWriteableReaderReturnsNull() throws IOException { } public void testWriteableReaderReturnsWrongName() throws IOException { - try (BytesStreamOutput out = new BytesStreamOutput()) { + try (TestStreamOutput out = new TestStreamOutput()) { NamedWriteableRegistry 
namedWriteableRegistry = new NamedWriteableRegistry( Collections.singletonList( new NamedWriteableRegistry.Entry( @@ -501,7 +501,7 @@ public void testWriteMap() throws IOException { expected.put(randomAlphaOfLength(2), randomAlphaOfLength(5)); } - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected, StreamOutput::writeString, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final Map loaded = in.readMap(StreamInput::readString, StreamInput::readString); @@ -518,7 +518,7 @@ public void testWriteImmutableMap() throws IOException { } final ImmutableOpenMap expected = expectedBuilder.build(); - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected, StreamOutput::writeString, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final ImmutableOpenMap loaded = in.readImmutableOpenMap(StreamInput::readString, StreamInput::readString); @@ -534,7 +534,7 @@ public void testWriteImmutableMapOfWritable() throws IOException { } final ImmutableOpenMap expected = expectedBuilder.build(); - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMap(expected); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final ImmutableOpenMap loaded = in.readImmutableOpenMap(TestWriteable::new, TestWriteable::new); @@ -550,7 +550,7 @@ public void testWriteMapAsList() throws IOException { expected.put("key_" + value, value); } - final BytesStreamOutput out = new BytesStreamOutput(); + final TestStreamOutput out = new TestStreamOutput(); out.writeMapValues(expected, StreamOutput::writeString); final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); final Map loaded = in.readMapValues(StreamInput::readString, value -> 
"key_" + value); @@ -674,7 +674,7 @@ public void testWriteMapWithConsistentOrder() throws IOException { assertNotEquals(mapKeys, reverseMapKeys); - try (BytesStreamOutput output = new BytesStreamOutput(); BytesStreamOutput reverseMapOutput = new BytesStreamOutput()) { + try (TestStreamOutput output = new TestStreamOutput(); TestStreamOutput reverseMapOutput = new TestStreamOutput()) { output.writeMapWithConsistentOrder(map); reverseMapOutput.writeMapWithConsistentOrder(reverseMap); @@ -689,7 +689,7 @@ public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException { () -> randomAlphaOfLength(5), () -> randomAlphaOfLength(5) ); - try (BytesStreamOutput streamOut = new BytesStreamOutput()) { + try (TestStreamOutput streamOut = new TestStreamOutput()) { streamOut.writeMapWithConsistentOrder(streamOutMap); StreamInput in = StreamInput.wrap(BytesReference.toBytes(streamOut.bytes())); Map streamInMap = in.readMap(); @@ -698,7 +698,7 @@ public void testReadMapByUsingWriteMapWithConsistentOrder() throws IOException { } public void testWriteMapWithConsistentOrderWithLinkedHashMapShouldThrowAssertError() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { + try (TestStreamOutput output = new TestStreamOutput()) { Map map = new LinkedHashMap<>(); Throwable e = expectThrows(AssertionError.class, () -> output.writeMapWithConsistentOrder(map)); assertEquals(AssertionError.class, e.getClass()); @@ -715,7 +715,7 @@ public void testWriteRandomStrings() throws IOException { for (int iter = 0; iter < iters; iter++) { List strings = new ArrayList<>(); int numStrings = randomIntBetween(100, 1000); - BytesStreamOutput output = new BytesStreamOutput(0); + TestStreamOutput output = new TestStreamOutput(); for (int i = 0; i < numStrings; i++) { String s = randomRealisticUnicodeOfLengthBetween(0, 2048); strings.add(s); @@ -739,7 +739,7 @@ public void testWriteLargeSurrogateOnlyString() throws IOException { assertEquals(2, deseretLetter.length()); 
String largeString = IntStream.range(0, 2048).mapToObj(s -> deseretLetter).collect(Collectors.joining("")).trim(); assertEquals("expands to 4 bytes", 4, new BytesRef(deseretLetter).length); - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeString(largeString); try (StreamInput streamInput = output.bytes().streamInput()) { assertEquals(largeString, streamInput.readString()); @@ -748,7 +748,7 @@ public void testWriteLargeSurrogateOnlyString() throws IOException { } public void testReadTooLargeArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -769,7 +769,7 @@ public void testReadTooLargeArraySize() throws IOException { } public void testReadCorruptedArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -791,7 +791,7 @@ public void testReadCorruptedArraySize() throws IOException { } public void testReadNegativeArraySize() throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput(0)) { + try (TestStreamOutput output = new TestStreamOutput()) { output.writeVInt(10); for (int i = 0; i < 10; i++) { output.writeInt(i); @@ -814,10 +814,10 @@ public void testReadNegativeArraySize() throws IOException { public void testVInt() throws IOException { final int value = randomInt(); - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeVInt(value); - BytesStreamOutput simple = new BytesStreamOutput(); + TestStreamOutput simple = new TestStreamOutput(); int i = value; while ((i & ~0x7F) != 0) { simple.writeByte(((byte) ((i & 0x7f) | 0x80))); @@ -834,14 +834,14 @@ public 
void testVLong() throws IOException { final long value = randomLong(); { // Read works for positive and negative numbers - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeVLongNoCheck(value); // Use NoCheck variant so we can write negative numbers StreamInput input = output.bytes().streamInput(); assertEquals(value, input.readVLong()); } if (value < 0) { // Write doesn't work for negative numbers - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); Exception e = expectThrows(IllegalStateException.class, () -> output.writeVLong(value)); assertEquals("Negative longs unsupported, use writeLong or writeZLong for negative numbers [" + value + "]", e.getMessage()); } @@ -855,7 +855,7 @@ public enum TestEnum { public void testEnum() throws IOException { TestEnum value = randomFrom(TestEnum.values()); - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); output.writeEnum(value); StreamInput input = output.bytes().streamInput(); assertEquals(value, input.readEnum(TestEnum.class)); @@ -863,7 +863,7 @@ public void testEnum() throws IOException { } public void testInvalidEnum() throws IOException { - BytesStreamOutput output = new BytesStreamOutput(); + TestStreamOutput output = new TestStreamOutput(); int randomNumber = randomInt(); boolean validEnum = randomNumber >= 0 && randomNumber < TestEnum.values().length; output.writeVInt(randomNumber); @@ -878,7 +878,7 @@ public void testInvalidEnum() throws IOException { } private static void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeTimeValue(value); assertEquals(expectedSize, out.size()); @@ -897,8 +897,111 @@ public void testTimeValueSerialize() throws Exception { 
assertEqualityAfterSerialize(TimeValue.timeValueSeconds(30), 2); final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values())); - BytesStreamOutput out = new BytesStreamOutput(); + TestStreamOutput out = new TestStreamOutput(); out.writeZLong(timeValue.duration()); assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length()); } + + private static class TestStreamOutput extends BytesStream { + + private final BytesStreamOutput output = new BytesStreamOutput(); + private final CountingStreamOutput counting = new CountingStreamOutput(); + + @Override + public void writeByte(byte b) { + output.writeByte(b); + counting.writeByte(b); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeBytes(byte[] b, int offset, int length) { + output.writeBytes(b, offset, length); + counting.writeBytes(b, offset, length); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeInt(int i) throws IOException { + output.writeInt(i); + counting.writeInt(i); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeIntArray(int[] values) throws IOException { + output.writeIntArray(values); + counting.writeIntArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeLong(long i) throws IOException { + output.writeLong(i); + counting.writeLong(i); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeLongArray(long[] values) throws IOException { + output.writeLongArray(values); + counting.writeLongArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeFloat(float v) throws IOException { + output.writeFloat(v); + counting.writeFloat(v); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeFloatArray(float[] values) throws 
IOException { + output.writeFloatArray(values); + counting.writeFloatArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeDouble(double v) throws IOException { + output.writeDouble(v); + counting.writeDouble(v); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void writeDoubleArray(double[] values) throws IOException { + output.writeDoubleArray(values); + counting.writeDoubleArray(values); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public BytesReference bytes() { + BytesReference bytesReference = output.bytes(); + assertThat((long) bytesReference.length(), equalTo(counting.size())); + return bytesReference; + } + + public int size() { + int size = output.size(); + assertThat((long) size, equalTo(counting.size())); + return size; + } + + @Override + public void flush() { + output.flush(); + counting.flush(); + assertThat((long) output.size(), equalTo(counting.size())); + } + + @Override + public void close() { + assertThat((long) output.size(), equalTo(counting.size())); + output.close(); + counting.close(); + } + } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java index 9e1ebd8a75b43..a69f0f6d73365 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java @@ -11,7 +11,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.CountingStreamOutput; import 
org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; @@ -124,31 +124,31 @@ private TriangleTreeNode(ShapeField.DecodedTriangle component) { } private void writeTo(StreamOutput out) throws IOException { - BytesStreamOutput scratchBuffer = new BytesStreamOutput(); + CountingStreamOutput countingBuffer = new CountingStreamOutput(); writeMetadata(out); writeComponent(out); if (left != null) { - left.writeNode(out, maxX, maxY, scratchBuffer); + left.writeNode(out, maxX, maxY, countingBuffer); } if (right != null) { - right.writeNode(out, maxX, maxY, scratchBuffer); + right.writeNode(out, maxX, maxY, countingBuffer); } } - private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, BytesStreamOutput scratchBuffer) throws IOException { + private void writeNode(StreamOutput out, int parentMaxX, int parentMaxY, CountingStreamOutput countingBuffer) throws IOException { out.writeVLong((long) parentMaxX - maxX); out.writeVLong((long) parentMaxY - maxY); - int size = nodeSize(false, parentMaxX, parentMaxY, scratchBuffer); - out.writeVInt(size); + long size = nodeSize(false, parentMaxX, parentMaxY, countingBuffer); + out.writeVInt(Math.toIntExact(size)); writeMetadata(out); writeComponent(out); if (left != null) { - left.writeNode(out, maxX, maxY, scratchBuffer); + left.writeNode(out, maxX, maxY, countingBuffer); } if (right != null) { - int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer); - out.writeVInt(rightSize); - right.writeNode(out, maxX, maxY, scratchBuffer); + long rightSize = right.nodeSize(true, maxX, maxY, countingBuffer); + out.writeVInt(Math.toIntExact(rightSize)); + right.writeNode(out, maxX, maxY, countingBuffer); } } @@ -184,50 +184,50 @@ private void writeComponent(StreamOutput out) throws IOException { out.writeVLong((long) maxY - component.cY); } - private int nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, BytesStreamOutput scratchBuffer) throws IOException { - int size = 0; + private long 
nodeSize(boolean includeBox, int parentMaxX, int parentMaxY, CountingStreamOutput countingBuffer) throws IOException { + long size = 0; size++; // metadata - size += componentSize(scratchBuffer); + size += componentSize(countingBuffer); if (left != null) { - size += left.nodeSize(true, maxX, maxY, scratchBuffer); + size += left.nodeSize(true, maxX, maxY, countingBuffer); } if (right != null) { - int rightSize = right.nodeSize(true, maxX, maxY, scratchBuffer); - scratchBuffer.reset(); - scratchBuffer.writeVLong(rightSize); - size += scratchBuffer.size(); // jump size + long rightSize = right.nodeSize(true, maxX, maxY, countingBuffer); + countingBuffer.reset(); + countingBuffer.writeVLong(rightSize); + size += countingBuffer.size(); // jump size size += rightSize; } if (includeBox) { - int jumpSize = size; - scratchBuffer.reset(); - scratchBuffer.writeVLong((long) parentMaxX - maxX); - scratchBuffer.writeVLong((long) parentMaxY - maxY); - scratchBuffer.writeVLong(jumpSize); - size += scratchBuffer.size(); // box size + long jumpSize = size; + countingBuffer.reset(); + countingBuffer.writeVLong((long) parentMaxX - maxX); + countingBuffer.writeVLong((long) parentMaxY - maxY); + countingBuffer.writeVLong(jumpSize); + size += countingBuffer.size(); // box size } return size; } - private int componentSize(BytesStreamOutput scratchBuffer) throws IOException { - scratchBuffer.reset(); + private long componentSize(CountingStreamOutput countingBuffer) throws IOException { + countingBuffer.reset(); if (component.type == ShapeField.DecodedTriangle.TYPE.POINT) { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); } else if (component.type == ShapeField.DecodedTriangle.TYPE.LINE) { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); - 
scratchBuffer.writeVLong((long) maxX - component.bX); - scratchBuffer.writeVLong((long) maxY - component.bY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.bX); + countingBuffer.writeVLong((long) maxY - component.bY); } else { - scratchBuffer.writeVLong((long) maxX - component.aX); - scratchBuffer.writeVLong((long) maxY - component.aY); - scratchBuffer.writeVLong((long) maxX - component.bX); - scratchBuffer.writeVLong((long) maxY - component.bY); - scratchBuffer.writeVLong((long) maxX - component.cX); - scratchBuffer.writeVLong((long) maxY - component.cY); + countingBuffer.writeVLong((long) maxX - component.aX); + countingBuffer.writeVLong((long) maxY - component.aY); + countingBuffer.writeVLong((long) maxX - component.bX); + countingBuffer.writeVLong((long) maxY - component.bY); + countingBuffer.writeVLong((long) maxX - component.cX); + countingBuffer.writeVLong((long) maxY - component.cY); } - return Math.toIntExact(scratchBuffer.size()); + return countingBuffer.size(); } } } From 4c2f23f30feb065f7bbd901f1774512def777038 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 4 Dec 2023 12:35:23 -0500 Subject: [PATCH 185/263] [ML] Correct ML exceptions to use RestStatus (#102781) * added exception status for JobResultsProvider * InferenceRunner - Throw statusException if a statusException is caught * TransportDeleteForecastAction - throw if caught status exception * ExpiredForecastsRemover - replaced failed search with status exception; added throw if status exception * ExpiredResultsRemover - throw if statusException * ExpiredModelSnapshotsRemover replaced exception with too many requests * ExpiredAnnotationsRemover too many requests * ProcessContext changed to statusException with too many requests for tryLock failure * ChunkedTrainedModelRestorer - Changed to status exception with too many 
requests * updating exceptions to have status codes in ml code for handling action failures * add status ioException in MachineLearning.java * changed exceptions caused by incomplete upgrades to include status RequestTimeout(408) * updated handling of exception collections * Added too_many_requests for ElasticsearchMappings * Added Request_Timeout status for failed datafeed job cleanup in TransportPutJobAction * Added RequstTimeout status for failed query parsing in DataFrameAnalyticsSource * Added InternalServerError status for negative pipeline count in GetTrainedMdoelStatsAction * removed assertion in ExceptionCollectionHandling --- .../dataframe/DataFrameAnalyticsSource.java | 9 ++- .../persistence/ElasticsearchMappings.java | 8 ++- ...rtCancelJobModelSnapshotUpgradeAction.java | 8 ++- .../ml/action/TransportCloseJobAction.java | 9 +-- .../action/TransportDeleteForecastAction.java | 12 +++- .../ml/action/TransportPutJobAction.java | 6 +- ...TransportStopDataFrameAnalyticsAction.java | 9 +-- .../action/TransportStopDatafeedAction.java | 9 +-- ...ransportUpgradeJobModelSnapshotAction.java | 6 +- .../CategorizeTextAggregationBuilder.java | 13 ++-- .../InternalCategorizationAggregation.java | 23 ++++--- .../datafeed/DatafeedConfigAutoUpdater.java | 19 +++++- .../dataframe/inference/InferenceRunner.java | 12 ++-- .../ChunkedTrainedModelRestorer.java | 6 +- .../job/persistence/JobResultsProvider.java | 24 +++++-- .../process/autodetect/ProcessContext.java | 5 +- .../retention/ExpiredAnnotationsRemover.java | 11 +++- .../retention/ExpiredForecastsRemover.java | 26 +++++++- .../ExpiredModelSnapshotsRemover.java | 12 +++- .../job/retention/ExpiredResultsRemover.java | 20 +++++- .../ml/utils/ExceptionCollectionHandling.java | 64 +++++++++++++++++++ .../persistence/ResultsPersisterService.java | 7 +- 22 files changed, 252 insertions(+), 66 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java index e6b263abd0f01..9c326f067caf7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsSource.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.dataframe; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,6 +14,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -171,7 +172,11 @@ public QueryBuilder getParsedQuery() { if (exception instanceof RuntimeException runtimeException) { throw runtimeException; } else { - throw new ElasticsearchException(queryProvider.getParsingException()); + throw new ElasticsearchStatusException( + queryProvider.getParsingException().getMessage(), + RestStatus.BAD_REQUEST, + queryProvider.getParsingException() + ); } } return queryProvider.getParsedQuery(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 1dde9dc6075d0..4187762ca58c6 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transports; import org.elasticsearch.xcontent.XContentType; @@ -189,10 +190,11 @@ protected void doRun() throws Exception { listener.onResponse(true); } else { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Attempt to put missing mapping in indices " + Arrays.toString(indicesThatRequireAnUpdate) - + " was not acknowledged" + + " was not acknowledged", + RestStatus.TOO_MANY_REQUESTS ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java index aa28135787b5d..2dcb9c5dfe705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCancelJobModelSnapshotUpgradeAction.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; 
+import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -37,6 +37,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + public class TransportCancelJobModelSnapshotUpgradeAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(TransportCancelJobModelSnapshotUpgradeAction.class); @@ -134,11 +136,11 @@ private void sendResponseOrFailure(ActionListener listener, AtomicArra + request.getJobId() + "]. Total failures [" + caughtExceptions.size() - + "], rethrowing first, all Exceptions: [" + + "], rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index 53f6c19ce43f1..7b561ccaede2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -63,6 
+63,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportCloseJobAction extends TransportTasksAction< JobTask, @@ -537,7 +538,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new CloseJobAction.Response(true)); return; } @@ -546,11 +547,11 @@ private static void sendResponseOrFailure( + jobId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java index 5aa85a6331c22..495d75b2de2cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastAction.java @@ -249,7 +249,17 @@ private static void handleFailure(Exception e, DeleteForecastAction.Request requ ); } } else { - listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + elasticsearchException.status(), + 
elasticsearchException + ) + ); + } else { + listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)); + } } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java index ebe766f6b5669..767ec08078b42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutJobAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -23,6 +23,7 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -120,8 +121,9 @@ protected void masterOperation( () -> format("[%s] failed to cleanup job after datafeed creation failure", request.getJobBuilder().getId()), deleteFailed ); - ElasticsearchException ex = new ElasticsearchException( + ElasticsearchStatusException ex = new ElasticsearchStatusException( "failed to cleanup job after datafeed creation failure", + RestStatus.REQUEST_TIMEOUT, failed ); ex.addSuppressed(deleteFailed); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java index c3d35fbc11593..42d36006acbde 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDataFrameAnalyticsAction.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -58,6 +57,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; + /** * Stops the persistent task for running data frame analytics. */ @@ -297,7 +298,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDataFrameAnalyticsAction.Response(true)); return; } @@ -306,11 +307,11 @@ private static void sendResponseOrFailure( + analyticsId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index 2c9668a504b55..41359f5fcc166 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -58,6 +58,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ml.utils.ExceptionCollectionHandling.exceptionArrayToStatusException; public class TransportStopDatafeedAction extends TransportTasksAction< TransportStartDatafeedAction.DatafeedTask, @@ -462,7 +463,7 @@ private static void sendResponseOrFailure( AtomicArray failures ) { List caughtExceptions = failures.asList(); - if (caughtExceptions.size() == 0) { + if (caughtExceptions.isEmpty()) { listener.onResponse(new StopDatafeedAction.Response(true)); return; } @@ -471,11 +472,11 @@ private static void sendResponseOrFailure( + datafeedId + "] with [" + caughtExceptions.size() - + "] failures, rethrowing last, all Exceptions: [" + + "] failures, rethrowing first. 
All Exceptions: [" + caughtExceptions.stream().map(Exception::getMessage).collect(Collectors.joining(", ")) + "]"; - ElasticsearchException e = new ElasticsearchException(msg, caughtExceptions.get(0)); + ElasticsearchStatusException e = exceptionArrayToStatusException(failures, msg); listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java index 6335e0b78bd83..3f6193c124a9a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -28,6 +28,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -270,8 +271,9 @@ public void onFailure(Exception e) { @Override public void onTimeout(TimeValue timeout) { listener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "snapshot upgrader request [{}] [{}] timed out after [{}]", + RestStatus.REQUEST_TIMEOUT, params.getJobId(), params.getSnapshotId(), timeout diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java index 8df56d9df9c2f..6fce8aa20ed16 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregationBuilder.java @@ -7,11 +7,12 @@ package org.elasticsearch.xpack.ml.aggs.categorization; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -123,12 +124,13 @@ public CategorizeTextAggregationBuilder(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (in.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); @@ -279,12 +281,13 @@ protected CategorizeTextAggregationBuilder( protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (out.getTransportVersion().before(ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } bucketCountThresholds.writeTo(out); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 0ed673ac5a365..9b337d559854a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -8,10 +8,11 @@ package org.elasticsearch.xpack.ml.aggs.categorization; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -109,12 +110,13 @@ public Bucket(SerializableTokenListCategory serializableCategory, long bucketOrd public Bucket(StreamInput in) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory = new SerializableTokenListCategory(in); @@ -127,12 +129,13 @@ public Bucket(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } serializableCategory.writeTo(out); @@ -239,12 +242,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { super(in); // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. if (in.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } this.similarityThreshold = in.readVInt(); @@ -257,12 +261,13 @@ public InternalCategorizationAggregation(StreamInput in) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException { // Disallow this aggregation in mixed version clusters that cross the algorithm change boundary. 
if (out.getTransportVersion().before(CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION)) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "[" + CategorizeTextAggregationBuilder.NAME + "] aggregation cannot be used in a cluster where some nodes have version [" + CategorizeTextAggregationBuilder.ALGORITHM_CHANGED_VERSION - + "] or higher and others have a version before this" + + "] or higher and others have a version before this", + RestStatus.BAD_REQUEST ); } out.writeVInt(similarityThreshold); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java index 330327dc31a46..e61ffba9b3164 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfigAutoUpdater.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
+ * + * This file has been contributed to be a Generative AI */ package org.elasticsearch.xpack.ml.datafeed; @@ -10,6 +12,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.IndicesOptions; @@ -17,6 +20,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; @@ -109,7 +113,20 @@ public void runUpdate() { logger.debug(() -> "[" + update.getId() + "] datafeed successfully updated"); } catch (Exception ex) { logger.warn(() -> "[" + update.getId() + "] failed being updated", ex); - failures.add(new ElasticsearchException("Failed to update datafeed {}", ex, update.getId())); + if (ex instanceof ElasticsearchException elasticsearchException) { + failures.add( + new ElasticsearchStatusException( + "Failed to update datafeed {}", + elasticsearchException.status(), + elasticsearchException, + update.getId() + ) + ); + } else { + failures.add( + new ElasticsearchStatusException("Failed to update datafeed {}", RestStatus.REQUEST_TIMEOUT, ex, update.getId()) + ); + } } } if (failures.isEmpty()) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 168b0deda87d4..cc59903436e2f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; @@ -115,15 +116,14 @@ public void run(String modelId) { } } catch (Exception e) { LOGGER.error(() -> format("[%s] Error running inference on model [%s]", config.getId(), modelId), e); - - if (e instanceof ElasticsearchException) { - Throwable rootCause = ((ElasticsearchException) e).getRootCause(); - throw new ElasticsearchException( + if (e instanceof ElasticsearchException elasticsearchException) { + throw new ElasticsearchStatusException( "[{}] failed running inference on model [{}]; cause was [{}]", - rootCause, + elasticsearchException.status(), + elasticsearchException.getRootCause(), config.getId(), modelId, - rootCause.getMessage() + elasticsearchException.getRootCause().getMessage() ); } throw ExceptionsHelper.serverError( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index 015b88552a1d0..3ace40e0deb6b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import 
org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; @@ -24,6 +24,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; @@ -238,13 +239,14 @@ static SearchResponse retryingSearch(Client client, String modelId, SearchReques * This intentionally prevents that code from attempting to retry loading the entire model. If the retry logic here * fails after the set retries we should not retry loading the entire model to avoid additional strain on the cluster. */ - throw new ElasticsearchException( + throw new ElasticsearchStatusException( format( "loading model [%s] failed after [%s] retries. The deployment is now in a failed state, " + "the error may be transient please stop the deployment and restart", modelId, retries ), + RestStatus.TOO_MANY_REQUESTS, e ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 5f796242e5bf8..d309ee2e5dc95 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -635,13 +635,19 @@ public void datafeedTimingStats( int unavailableShards = searchResponse.getTotalShards() - searchResponse.getSuccessfulShards(); if (CollectionUtils.isEmpty(shardFailures) == false) { LOGGER.error("[{}] Search request returned shard failures: {}", jobId, Arrays.toString(shardFailures)); - listener.onFailure(new ElasticsearchException(ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures))); + 
listener.onFailure( + new ElasticsearchStatusException( + ExceptionsHelper.shardFailuresToErrorMsg(jobId, shardFailures), + RestStatus.TOO_MANY_REQUESTS + ) + ); return; } if (unavailableShards > 0) { listener.onFailure( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; @@ -739,13 +745,19 @@ public void getAutodetectParams(Job job, String snapshotId, Consumer 0) { errorHandler.accept( - new ElasticsearchException( - "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards" + new ElasticsearchStatusException( + "[" + jobId + "] Search request encountered [" + unavailableShards + "] unavailable shards", + RestStatus.TOO_MANY_REQUESTS ) ); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java index 49b02bdd6ae74..f124deecd9914 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java @@ -8,7 +8,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.task.JobTask; @@ -61,7 +62,7 @@ void tryLock() { throw ExceptionsHelper.serverError("Failed to acquire process lock for job [" + jobTask.getJobId() + "]"); } } catch 
(InterruptedException e) { - throw new ElasticsearchException(e); + throw new ElasticsearchStatusException(e.getMessage(), RestStatus.TOO_MANY_REQUESTS, e); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java index 427b7c9defa5a..917d5881ae130 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.client.internal.OriginSettingClient; @@ -19,6 +19,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.Annotation; @@ -100,7 +101,13 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired annotations for job [" + job.getId() + "]", e)); + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired annotations for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index ed4e6875e260a..424668a20bf05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -75,7 +77,15 @@ public void remove(float requestsPerSec, ActionListener listener, Boole LOGGER.debug("Removing forecasts that expire before [{}]", cutoffEpochMs); ActionListener forecastStatsHandler = ActionListener.wrap( searchResponse -> deleteForecasts(searchResponse, requestsPerSec, listener, isTimedOutSupplier), - e -> listener.onFailure(new ElasticsearchException("An error occurred while searching forecasts to delete", e)) + e -> { + listener.onFailure( + new ElasticsearchStatusException( + "An error occurred while searching forecasts to delete", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } ); SearchSourceBuilder source = new SearchSourceBuilder(); @@ -143,7 +153,19 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired 
forecasts", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchException( + "Failed to remove expired forecasts", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException("Failed to remove expired forecasts", RestStatus.TOO_MANY_REQUESTS, e) + ); + } } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 1854e3b752de3..507e9dac6282d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -8,7 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -230,7 +231,14 @@ public void onResponse(QueryPage searchResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("[{}] Search for expired snapshots failed", e, job.getId())); + listener.onFailure( + new ElasticsearchStatusException( + "[{}] Search for expired snapshots failed", + RestStatus.TOO_MANY_REQUESTS, + 
e, + job.getId() + ) + ); } }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 537297d130789..db712def11eac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ThreadedActionListener; @@ -22,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -115,7 +117,23 @@ public void onResponse(BulkByScrollResponse bulkByScrollResponse) { @Override public void onFailure(Exception e) { - listener.onFailure(new ElasticsearchException("Failed to remove expired results for job [" + job.getId() + "]", e)); + if (e instanceof ElasticsearchException elasticsearchException) { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + elasticsearchException.status(), + elasticsearchException + ) + ); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Failed to remove expired results for job [" + job.getId() + "]", + RestStatus.TOO_MANY_REQUESTS, + e + ) + ); + } } }); } diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java new file mode 100644 index 0000000000000..d60194918274e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/ExceptionCollectionHandling.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + * + * This file was contributed to by generative AI + */ + +package org.elasticsearch.xpack.ml.utils; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.rest.RestStatus; + +import java.util.List; + +public class ExceptionCollectionHandling { + + /** + * Given an array of exceptions, return a single ElasticsearchStatusException. + * Return the first exception if all exceptions have 4XX status. + * Otherwise, return a generic 500 error. 
+ * + * @param failures must not be empty or null + * @param message the message to use for the ElasticsearchStatusException + */ + public static ElasticsearchStatusException exceptionArrayToStatusException(AtomicArray failures, String message) { + + List caughtExceptions = failures.asList(); + if (caughtExceptions.isEmpty()) { + assert false : "method to combine exceptions called with no exceptions"; + return new ElasticsearchStatusException("No exceptions caught", RestStatus.INTERNAL_SERVER_ERROR); + } else { + + boolean allElasticsearchException = true; + boolean allStatus4xx = true; + + for (Exception exception : caughtExceptions) { + if (exception instanceof ElasticsearchException elasticsearchException) { + if (elasticsearchException.status().getStatus() < 400 || elasticsearchException.status().getStatus() >= 500) { + allStatus4xx = false; + } + } else { + allElasticsearchException = false; + break; + } + } + + if (allElasticsearchException && allStatus4xx) { + return new ElasticsearchStatusException( + message, + ((ElasticsearchException) caughtExceptions.get(0)).status(), + caughtExceptions.get(0) + ); + } else { + return new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + } + + } + + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index 5fa434e530bc5..e87fbf48ca421 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ActionListener; @@ -192,8 +191,9 @@ public void bulkIndexWithRetry( ) { if (isShutdown || isResetMode) { finalListener.onFailure( - new ElasticsearchException( + new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ) ); @@ -233,8 +233,9 @@ private BulkResponse bulkIndexWithRetry( BiConsumer> actionExecutor ) { if (isShutdown || isResetMode) { - throw new ElasticsearchException( + throw new ElasticsearchStatusException( "Bulk indexing has failed as {}", + RestStatus.TOO_MANY_REQUESTS, isShutdown ? "node is shutting down." : "machine learning feature is being reset." ); } From c54ce6872386dbeea2384398f036cf0914ba7937 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 4 Dec 2023 18:54:38 +0100 Subject: [PATCH 186/263] Switch InstallPluginActionTests to non-blocking SecureRandom seed generator (#102893) * Force urandom on Linux * Unmute test --- distribution/tools/plugin-cli/build.gradle | 8 ++++++++ .../plugins/cli/InstallPluginActionTests.java | 1 - 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index e0d1dd983c0de..3859dfa1ddbb9 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ +import org.elasticsearch.gradle.OS + apply plugin: 'elasticsearch.build' base { @@ -38,6 +40,12 @@ tasks.named("dependencyLicenses").configure { tasks.named("test").configure { // TODO: find a way to add permissions for the tests in this module systemProperty 'tests.security.manager', 'false' + // These tests are "heavy" on the secure number generator. 
On Linux, the NativePRNG defaults to /dev/random for the seeds, and + // its entropy is quite limited, to the point that it's known to hang: https://bugs.openjdk.org/browse/JDK-6521844 + // We force the seed to be initialized from /dev/urandom, which is less secure, but in case of unit tests is not important. + if (OS.current() == OS.LINUX) { + systemProperty 'java.security.egd', 'file:/dev/urandom' + } } /* diff --git a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java index f7882a3fce743..c088e89338e74 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/elasticsearch/plugins/cli/InstallPluginActionTests.java @@ -118,7 +118,6 @@ import static org.mockito.Mockito.spy; @LuceneTestCase.SuppressFileSystems("*") -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102783") public class InstallPluginActionTests extends ESTestCase { private InstallPluginAction skipJarHellAction; From 67ab4b46ea380d565f7ae2bec0a3d95587613a42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Mon, 4 Dec 2023 19:02:21 +0100 Subject: [PATCH 187/263] [Transform] Ensure transform updates only modify the expected transform task (#102934) --- docs/changelog/102934.yaml | 6 ++++++ .../core/transform/action/UpdateTransformAction.java | 10 ++++++++++ .../action/UpdateTransformActionRequestTests.java | 9 +++++++++ 3 files changed, 25 insertions(+) create mode 100644 docs/changelog/102934.yaml diff --git a/docs/changelog/102934.yaml b/docs/changelog/102934.yaml new file mode 100644 index 0000000000000..4f61427506cf3 --- /dev/null +++ b/docs/changelog/102934.yaml @@ -0,0 +1,6 @@ +pr: 102934 +summary: Ensure transform updates only modify the expected transform task +area: 
Transform +type: bug +issues: + - 102933 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java index f9da4082dbfa2..b2a764b0be5b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -186,6 +187,15 @@ public boolean equals(Object obj) { && Objects.equals(authState, other.authState) && getTimeout().equals(other.getTimeout()); } + + @Override + public boolean match(Task task) { + if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) { + String taskId = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length()); + return taskId.equals(this.id); + } + return false; + } } public static class Response extends BaseTasksResponse implements ToXContentObject { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java index bcfe2b1728cbf..9c90a10e204f0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/UpdateTransformActionRequestTests.java @@ -9,6 +9,7 @@ 
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.xpack.core.transform.action.UpdateTransformAction.Request; import org.elasticsearch.xpack.core.transform.transforms.AuthorizationStateTests; import org.elasticsearch.xpack.core.transform.transforms.TransformConfigTests; @@ -74,4 +75,12 @@ protected Request mutateInstance(Request instance) { return new Request(update, id, deferValidation, timeout); } + + public void testMatch() { + Request request = new Request(randomTransformConfigUpdate(), "my-transform-7", false, null); + assertTrue(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-7", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-77", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "my-transform-7", null, null))); + } } From 5b0aec537e54339c6d90fa367a5fc2ce7cce3ecc Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Dec 2023 19:09:30 +0100 Subject: [PATCH 188/263] Fix remaining leaked SearchResponse issues in :server:test (#102897) Same as #102896, handling almost all of the remaining spots (just a handful of tricky ones left that I'll open a separate PR for). 
--- .../search/KnnSearchSingleNodeTests.java | 218 ++++----- .../action/search/SearchAsyncActionTests.java | 146 ++++--- .../search/SearchResponseMergerTests.java | 9 +- .../action/search/SearchResponseTests.java | 412 ++++++++++-------- .../TransportMultiSearchActionTests.java | 32 +- .../search/TransportSearchActionTests.java | 108 +++-- .../FieldStatsProviderRefreshTests.java | 45 +- .../flattened/FlattenedFieldSearchTests.java | 217 ++++----- .../search/SearchServiceTests.java | 56 ++- .../search/geo/GeoPointShapeQueryTests.java | 5 +- .../snapshots/SnapshotResiliencyTests.java | 16 +- .../ESBlobStoreRepositoryIntegTestCase.java | 15 +- .../geo/BasePointShapeQueryTestCase.java | 3 +- .../search/geo/BaseShapeIntegTestCase.java | 2 +- 14 files changed, 719 insertions(+), 565 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java index e6abe2f041a4c..a678956b20e59 100644 --- a/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/KnnSearchSingleNodeTests.java @@ -26,6 +26,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -62,15 +63,17 @@ public void testKnnSearchRemovedVector() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 20, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .setSize(10) - .get(); - - // Originally indexed 20 documents, but deleted vector field with an update, so only 
19 should be hit - assertHitCount(response, 19); - assertEquals(10, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .setSize(10), + response -> { + // Originally indexed 20 documents, but deleted vector field with an update, so only 19 should be hit + assertHitCount(response, 19); + assertEquals(10, response.getHits().getHits().length); + } + ); // Make sure we still have 20 docs assertHitCount(client().prepareSearch("index").setSize(0).setTrackTotalHits(true), 20); } @@ -104,19 +107,22 @@ public void testKnnWithQuery() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 15); - assertEquals(10, response.getHits().getHits().length); - - // Because of the boost, vector results should appear first - assertNotNull(response.getHits().getAt(0).field("vector")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 15); + assertEquals(10, response.getHits().getHits().length); + + // Because of the boost, vector results should appear first + assertNotNull(response.getHits().getAt(0).field("vector")); + } + ); } public void testKnnFilter() throws IOException { @@ -150,13 +156,13 @@ public void testKnnFilter() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, 
null).addFilterQuery( QueryBuilders.termsQuery("field", "second") ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); - for (SearchHit hit : response.getHits().getHits()) { - assertEquals("second", hit.field("field").getValue()); - } + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + for (SearchHit hit : response.getHits().getHits()) { + assertEquals("second", hit.field("field").getValue()); + } + }); } public void testKnnFilterWithRewrite() throws IOException { @@ -193,10 +199,10 @@ public void testKnnFilterWithRewrite() throws IOException { KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).addFilterQuery( QueryBuilders.termsLookupQuery("field", new TermsLookup("index", "lookup-doc", "other-field")) ); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, 5); - assertEquals(5, response.getHits().getHits().length); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, 5); + assertEquals(5, response.getHits().getHits().length); + }); } public void testMultiKnnClauses() throws IOException { @@ -239,26 +245,29 @@ public void testMultiKnnClauses() throws IOException { float[] queryVector = randomVector(20f, 21f); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null).boost(5.0f); KnnSearchBuilder knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null).boost(10.0f); - SearchResponse response = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - 
.setQuery(QueryBuilders.matchQuery("text", "goodnight")) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k plus the number of text matches - assertHitCount(response, 20); - assertEquals(10, response.getHits().getHits().length); - InternalStats agg = response.getAggregations().get("stats"); - assertThat(agg.getCount(), equalTo(20L)); - assertThat(agg.getMax(), equalTo(3.0)); - assertThat(agg.getMin(), equalTo(1.0)); - assertThat(agg.getAvg(), equalTo(2.25)); - assertThat(agg.getSum(), equalTo(45.0)); - - // Because of the boost & vector distributions, vector_2 results should appear first - assertNotNull(response.getHits().getAt(0).field("vector_2")); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .setQuery(QueryBuilders.matchQuery("text", "goodnight")) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + response -> { + + // The total hits is k plus the number of text matches + assertHitCount(response, 20); + assertEquals(10, response.getHits().getHits().length); + InternalStats agg = response.getAggregations().get("stats"); + assertThat(agg.getCount(), equalTo(20L)); + assertThat(agg.getMax(), equalTo(3.0)); + assertThat(agg.getMin(), equalTo(1.0)); + assertThat(agg.getAvg(), equalTo(2.25)); + assertThat(agg.getSum(), equalTo(45.0)); + + // Because of the boost & vector distributions, vector_2 results should appear first + assertNotNull(response.getHits().getAt(0).field("vector_2")); + } + ); } public void testMultiKnnClausesSameDoc() throws IOException { @@ -298,38 +307,42 @@ public void testMultiKnnClausesSameDoc() throws IOException { // Having the same query vector and same docs should mean our KNN scores are linearly combined if the same doc is matched KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 5, 50, null); KnnSearchBuilder 
knnSearch2 = new KnnSearchBuilder("vector_2", queryVector, 5, 50, null); - SearchResponse responseOneKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - SearchResponse responseBothKnn = client().prepareSearch("index") - .setKnnSearch(List.of(knnSearch, knnSearch2)) - .addFetchField("*") - .setSize(10) - .addAggregation(AggregationBuilders.stats("stats").field("number")) - .get(); - - // The total hits is k matched docs - assertHitCount(responseOneKnn, 5); - assertHitCount(responseBothKnn, 5); - assertEquals(5, responseOneKnn.getHits().getHits().length); - assertEquals(5, responseBothKnn.getHits().getHits().length); - - for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { - SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; - SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; - assertThat(bothHit.getId(), equalTo(oneHit.getId())); - assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); - } - InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); - InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); - assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); - assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); - assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); - assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); - assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseOneKnn -> assertResponse( + client().prepareSearch("index") + .setKnnSearch(List.of(knnSearch, knnSearch2)) + .addFetchField("*") + .setSize(10) + .addAggregation(AggregationBuilders.stats("stats").field("number")), + responseBothKnn -> { + + // The total hits is k 
matched docs + assertHitCount(responseOneKnn, 5); + assertHitCount(responseBothKnn, 5); + assertEquals(5, responseOneKnn.getHits().getHits().length); + assertEquals(5, responseBothKnn.getHits().getHits().length); + + for (int i = 0; i < responseOneKnn.getHits().getHits().length; i++) { + SearchHit oneHit = responseOneKnn.getHits().getHits()[i]; + SearchHit bothHit = responseBothKnn.getHits().getHits()[i]; + assertThat(bothHit.getId(), equalTo(oneHit.getId())); + assertThat(bothHit.getScore(), greaterThan(oneHit.getScore())); + } + InternalStats oneAgg = responseOneKnn.getAggregations().get("stats"); + InternalStats bothAgg = responseBothKnn.getAggregations().get("stats"); + assertThat(bothAgg.getCount(), equalTo(oneAgg.getCount())); + assertThat(bothAgg.getAvg(), equalTo(oneAgg.getAvg())); + assertThat(bothAgg.getMax(), equalTo(oneAgg.getMax())); + assertThat(bothAgg.getSum(), equalTo(oneAgg.getSum())); + assertThat(bothAgg.getMin(), equalTo(oneAgg.getMin())); + } + ) + ); } public void testKnnFilteredAlias() throws IOException { @@ -366,10 +379,11 @@ public void testKnnFilteredAlias() throws IOException { float[] queryVector = randomVector(); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 10, 50, null); - SearchResponse response = client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10).get(); - - assertHitCount(response, expectedHits); - assertEquals(expectedHits, response.getHits().getHits().length); + final int expectedHitCount = expectedHits; + assertResponse(client().prepareSearch("test-alias").setKnnSearch(List.of(knnSearch)).setSize(10), response -> { + assertHitCount(response, expectedHitCount); + assertEquals(expectedHitCount, response.getHits().getHits().length); + }); } public void testKnnSearchAction() throws IOException { @@ -399,14 +413,14 @@ public void testKnnSearchAction() throws IOException { // Since there's no kNN search action at the transport layer, we just emulate // how the action works (it 
builds a kNN query under the hood) float[] queryVector = randomVector(); - SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)) - .setSize(2) - .get(); - - // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard - assertHitCount(response, 5 * 2); - assertEquals(2, response.getHits().getHits().length); + assertResponse( + client().prepareSearch("index1", "index2").setQuery(new KnnVectorQueryBuilder("vector", queryVector, 5, null)).setSize(2), + response -> { + // The total hits is num_cands * num_shards, since the query gathers num_cands hits from each shard + assertHitCount(response, 5 * 2); + assertEquals(2, response.getHits().getHits().length); + } + ); } public void testKnnVectorsWith4096Dims() throws IOException { @@ -434,11 +448,11 @@ public void testKnnVectorsWith4096Dims() throws IOException { float[] queryVector = randomVector(4096); KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 3, 50, null).boost(5.0f); - SearchResponse response = client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10).get(); - - assertHitCount(response, 3); - assertEquals(3, response.getHits().getHits().length); - assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + assertResponse(client().prepareSearch("index").setKnnSearch(List.of(knnSearch)).addFetchField("*").setSize(10), response -> { + assertHitCount(response, 3); + assertEquals(3, response.getHits().getHits().length); + assertEquals(4096, response.getHits().getAt(0).field("vector").getValues().size()); + }); } private float[] randomVector() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 430e66c116744..a02eddf039e46 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -313,6 +313,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean latchTriggered = new AtomicBoolean(); var results = new ArraySearchPhaseResults(shardsIter.size()); + final TestSearchResponse testResponse = new TestSearchResponse(); try { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", @@ -335,7 +336,6 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY ) { - final TestSearchResponse response = new TestSearchResponse(); @Override protected void executePhaseOnShard( @@ -343,7 +343,7 @@ protected void executePhaseOnShard( SearchShardTarget shard, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), @@ -368,7 +368,7 @@ public void run() { assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); sendReleaseSearchContext(result.getContextId(), new MockConnection(result.node), OriginalIndices.NONE); } - responseListener.onResponse(response); + responseListener.onResponse(testResponse); if (latchTriggered.compareAndSet(false, true) == false) { throw new AssertionError("latch triggered twice"); } @@ -391,6 +391,7 @@ public void run() { final List runnables = executor.shutdownNow(); 
assertThat(runnables, equalTo(Collections.emptyList())); } finally { + testResponse.decRef(); results.decRef(); } } @@ -437,79 +438,82 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY); ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); - AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( - "test", - logger, - transportService, - (cluster, node) -> { - assert cluster == null : "cluster was not null: " + cluster; - return lookup.get(node); - }, - aliasFilters, - Collections.emptyMap(), - executor, - request, - responseListener, - shardsIter, - new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), - ClusterState.EMPTY_STATE, - null, - new ArraySearchPhaseResults<>(shardsIter.size()), - request.getMaxConcurrentShardRequests(), - SearchResponse.Clusters.EMPTY - ) { - final TestSearchResponse response = new TestSearchResponse(); - - @Override - protected void executePhaseOnShard( - SearchShardIterator shardIt, - SearchShardTarget shard, - SearchActionListener listener + final TestSearchResponse response = new TestSearchResponse(); + try { + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( + "test", + logger, + transportService, + (cluster, node) -> { + assert cluster == null : "cluster was not null: " + cluster; + return lookup.get(node); + }, + aliasFilters, + Collections.emptyMap(), + executor, + request, + responseListener, + shardsIter, + new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0), + ClusterState.EMPTY_STATE, + null, + new ArraySearchPhaseResults<>(shardsIter.size()), + request.getMaxConcurrentShardRequests(), + SearchResponse.Clusters.EMPTY ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); 
- Transport.Connection connection = getConnection(null, shard.getNodeId()); - final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { - testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); - } else { - testSearchPhaseResult = new TestSearchPhaseResult( - new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), - connection.getNode() - ); - Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); - ids.add(testSearchPhaseResult.getContextId()); - } - if (randomBoolean()) { - listener.onResponse(testSearchPhaseResult); - } else { - new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + @Override + protected void executePhaseOnShard( + SearchShardIterator shardIt, + SearchShardTarget shard, + SearchActionListener listener + ) { + assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); + Transport.Connection connection = getConnection(null, shard.getNodeId()); + final TestSearchPhaseResult testSearchPhaseResult; + if (shard.getShardId().id() == 0) { + testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); + } else { + testSearchPhaseResult = new TestSearchPhaseResult( + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), + connection.getNode() + ); + Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); + ids.add(testSearchPhaseResult.getContextId()); + } + if (randomBoolean()) { + listener.onResponse(testSearchPhaseResult); + } else { + new Thread(() -> listener.onResponse(testSearchPhaseResult)).start(); + } } - } - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - throw new RuntimeException("boom"); - } - }; + @Override + protected SearchPhase 
getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase("test") { + @Override + public void run() { + throw new RuntimeException("boom"); + } + }; + } + }; + asyncAction.start(); + latch.await(); + assertNotNull(failure.get()); + assertThat(failure.get().getCause().getMessage(), containsString("boom")); + assertFalse(nodeToContextMap.isEmpty()); + assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); + assertEquals(shardsIter.size() - 1, numFreedContext.get()); + if (nodeToContextMap.containsKey(primaryNode)) { + assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); + } else { + assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); } - }; - asyncAction.start(); - latch.await(); - assertNotNull(failure.get()); - assertThat(failure.get().getCause().getMessage(), containsString("boom")); - assertFalse(nodeToContextMap.isEmpty()); - assertTrue(nodeToContextMap.toString(), nodeToContextMap.containsKey(primaryNode) || nodeToContextMap.containsKey(replicaNode)); - assertEquals(shardsIter.size() - 1, numFreedContext.get()); - if (nodeToContextMap.containsKey(primaryNode)) { - assertTrue(nodeToContextMap.get(primaryNode).toString(), nodeToContextMap.get(primaryNode).isEmpty()); - } else { - assertTrue(nodeToContextMap.get(replicaNode).toString(), nodeToContextMap.get(replicaNode).isEmpty()); + final List runnables = executor.shutdownNow(); + assertThat(runnables, equalTo(Collections.emptyList())); + } finally { + response.decRef(); } - final List runnables = executor.shutdownNow(); - assertThat(runnables, equalTo(Collections.emptyList())); } public void testAllowPartialResults() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java 
b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index e250964c50687..dc6e69b15ee32 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -77,7 +77,14 @@ public void init() { private void addResponse(SearchResponseMerger searchResponseMerger, SearchResponse searchResponse) { if (randomBoolean()) { - executorService.submit(() -> searchResponseMerger.add(searchResponse)); + searchResponse.incRef(); + executorService.submit(() -> { + try { + searchResponseMerger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + }); } else { searchResponseMerger.add(searchResponse); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index b02dea53bc8b9..b45a04922c187 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -271,7 +271,12 @@ static SearchResponse.Clusters createCCSClusterObject( * compare xContent, so we omit it here */ public void testFromXContent() throws IOException { - doFromXContentTestWithRandomFields(createTestItem(), false); + var response = createTestItem(); + try { + doFromXContentTestWithRandomFields(response, false); + } finally { + response.decRef(); + } } /** @@ -281,7 +286,12 @@ public void testFromXContent() throws IOException { * fields to SearchHits, Aggregations etc... 
is tested in their own tests */ public void testFromXContentWithRandomFields() throws IOException { - doFromXContentTestWithRandomFields(createMinimalTestItem(), true); + var response = createMinimalTestItem(); + try { + doFromXContentTestWithRandomFields(response, true); + } finally { + response.decRef(); + } } private void doFromXContentTestWithRandomFields(SearchResponse response, boolean addRandomFields) throws IOException { @@ -328,15 +338,15 @@ public void testFromXContentWithFailures() throws IOException { for (int i = 0; i < failures.length; i++) { failures[i] = ShardSearchFailureTests.createTestItem(IndexMetadata.INDEX_UUID_NA_VALUE); } + BytesReference originalBytes; SearchResponse response = createTestItem(failures); XContentType xcontentType = randomFrom(XContentType.values()); - final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); - BytesReference originalBytes = toShuffledXContent( - ChunkedToXContent.wrapAsToXContent(response), - xcontentType, - params, - randomBoolean() - ); + try { + final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); + originalBytes = toShuffledXContent(ChunkedToXContent.wrapAsToXContent(response), xcontentType, params, randomBoolean()); + } finally { + response.decRef(); + } try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { SearchResponse parsed = SearchResponse.fromXContent(parser); try { @@ -388,26 +398,30 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - 
assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -428,34 +442,38 @@ public void testToXContent() throws IOException { ShardSearchFailure.EMPTY_ARRAY, new SearchResponse.Clusters(5, 3, 2) ); - String expectedString = XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 0, - "successful": 0, - "skipped": 0, - "failed": 0 - }, - "_clusters": { - "total": 5, - "successful": 3, - "skipped": 2, - "running":0, - "partial": 0, - "failed": 0 - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ { "_id": "id1", "_score": 2.0 } ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 0, + "successful": 0, + "skipped": 0, + "failed": 0 + }, + "_clusters": { + "total": 5, + "successful": 3, + "skipped": 2, + "running":0, + "partial": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 100, + "relation": "eq" + }, + "max_score": 1.5, + "hits": [ { "_id": "id1", "_score": 2.0 } ] + } + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } { SearchResponse response = new SearchResponse( @@ -485,142 +503,154 @@ public void testToXContent() throws IOException { new ShardSearchFailure[] { new ShardSearchFailure(new IllegalStateException("corrupt index")) } ) ); - String expectedString = 
XContentHelper.stripWhitespace(""" - { - "took": 0, - "timed_out": false, - "_shards": { - "total": 20, - "successful": 9, - "skipped": 2, - "failed": 0 - }, - "_clusters": { - "total": 4, - "successful": 1, - "skipped": 1, - "running":0, - "partial": 1, - "failed": 1, - "details": { - "(local)": { - "status": "successful", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 5, - "skipped": 1, - "failed": 0 - } - }, - "cluster_1": { - "status": "skipped", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } - } - ] + try { + String expectedString = XContentHelper.stripWhitespace(""" + { + "took": 0, + "timed_out": false, + "_shards": { + "total": 20, + "successful": 9, + "skipped": 2, + "failed": 0 }, - "cluster_2": { - "status": "failed", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 0, - "skipped": 0, - "failed": 5 - }, - "failures": [ - { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" + "_clusters": { + "total": 4, + "successful": 1, + "skipped": 1, + "running":0, + "partial": 1, + "failed": 1, + "details": { + "(local)": { + "status": "successful", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 5, + "skipped": 1, + "failed": 0 } + }, + "cluster_1": { + "status": "skipped", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_2": { + "status": "failed", 
+ "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 0, + "skipped": 0, + "failed": 5 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] + }, + "cluster_0": { + "status": "partial", + "indices": "foo,bar*", + "took": 1000, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 4, + "skipped": 1, + "failed": 1 + }, + "failures": [ + { + "shard": -1, + "index": null, + "reason": { + "type": "illegal_state_exception", + "reason": "corrupt index" + } + } + ] } - ] + } }, - "cluster_0": { - "status": "partial", - "indices": "foo,bar*", - "took": 1000, - "timed_out": false, - "_shards": { - "total": 5, - "successful": 4, - "skipped": 1, - "failed": 1 + "hits": { + "total": { + "value": 100, + "relation": "eq" }, - "failures": [ + "max_score": 1.5, + "hits": [ { - "shard": -1, - "index": null, - "reason": { - "type": "illegal_state_exception", - "reason": "corrupt index" - } + "_id": "id1", + "_score": 2.0 } ] } - } - }, - "hits": { - "total": { - "value": 100, - "relation": "eq" - }, - "max_score": 1.5, - "hits": [ - { - "_id": "id1", - "_score": 2.0 - } - ] - } - }"""); - assertEquals(expectedString, Strings.toString(response)); + }"""); + assertEquals(expectedString, Strings.toString(response)); + } finally { + response.decRef(); + } } } public void testSerialization() throws IOException { SearchResponse searchResponse = createTestItem(false); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - TransportVersion.current() - ); - if (searchResponse.getHits().getTotalHits() == null) { - assertNull(deserialized.getHits().getTotalHits()); - } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, 
deserialized.getHits().getTotalHits().relation); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + if (searchResponse.getHits().getTotalHits() == null) { + assertNull(deserialized.getHits().getTotalHits()); + } else { + assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); + assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + } + assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); + assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); + assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); + assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); + assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); + assertEquals(searchResponse.getClusters(), deserialized.getClusters()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); } - assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); - assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); - assertEquals(searchResponse.getFailedShards(), deserialized.getFailedShards()); - assertEquals(searchResponse.getTotalShards(), deserialized.getTotalShards()); - assertEquals(searchResponse.getSkippedShards(), deserialized.getSkippedShards()); - assertEquals(searchResponse.getClusters(), deserialized.getClusters()); } public void testToXContentEmptyClusters() throws IOException { @@ -634,15 +664,23 @@ public void testToXContentEmptyClusters() throws IOException { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - SearchResponse deserialized = copyWriteable( - searchResponse, - namedWriteableRegistry, - SearchResponse::new, - 
TransportVersion.current() - ); - XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); - deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals(0, Strings.toString(builder).length()); + try { + SearchResponse deserialized = copyWriteable( + searchResponse, + namedWriteableRegistry, + SearchResponse::new, + TransportVersion.current() + ); + try { + XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); + deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals(0, Strings.toString(builder).length()); + } finally { + deserialized.decRef(); + } + } finally { + searchResponse.decRef(); + } } public void testClustersHasRemoteCluster() { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java index 70bd2d9f00a05..1097174628e58 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java @@ -83,7 +83,12 @@ public void search(final SearchRequest request, final ActionListener 1L, SearchResponse.Clusters.EMPTY)); + var response = SearchResponse.empty(() -> 1L, SearchResponse.Clusters.EMPTY); + try { + listener.onResponse(response); + } finally { + response.decRef(); + } } @Override @@ -161,18 +166,21 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); - listener.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + var response = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 0, + 0, + 0, + 0L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY ); + try { + 
listener.onResponse(response); + } finally { + response.decRef(); + } }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 366161881d30f..7090d590a4901 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -542,7 +542,7 @@ public void testCCSRemoteReduceMergeFails() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -584,9 +584,12 @@ public void testCCSRemoteReduce() throws Exception { SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + final SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.incRef(); + response.set(newValue); + }), latch ); TransportSearchAction.ccsRemoteReduce( @@ -608,18 +611,25 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, 
searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } { SearchRequest searchRequest = new SearchRequest(); @@ -650,7 +660,7 @@ public void testCCSRemoteReduce() throws Exception { Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -710,7 +720,7 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); assertNotNull(failure.get()); @@ -728,9 +738,12 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SearchRequest searchRequest = new SearchRequest(); final CountDownLatch latch = new CountDownLatch(1); SetOnce>> setOnce = new SetOnce<>(); - AtomicReference response = new AtomicReference<>(); + SetOnce response = new SetOnce<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -756,22 +769,26 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + 
resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals( - disconnectedNodesIndices.size(), - searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) - ); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - int successful = totalClusters - disconnectedNodesIndices.size(); - assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(successful == 0 ? 0 : successful + 1, searchResponse.getNumReducePhases()); + try { + assertEquals( + disconnectedNodesIndices.size(), + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) + ); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + int successful = totalClusters - disconnectedNodesIndices.size(); + assertEquals(successful, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(successful == 0 ? 
0 : successful + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } } // give transport service enough time to realize that the node is down, and to notify the connection listeners @@ -794,7 +811,10 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti SetOnce>> setOnce = new SetOnce<>(); AtomicReference response = new AtomicReference<>(); LatchedActionListener listener = new LatchedActionListener<>( - ActionTestUtils.assertNoFailureListener(response::set), + ActionTestUtils.assertNoFailureListener(newValue -> { + newValue.mustIncRef(); + response.set(newValue); + }), latch ); Set clusterAliases = new HashSet<>(remoteClusterService.getRegisteredRemoteClusterNames()); @@ -820,18 +840,25 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti Tuple> tuple = setOnce.get(); assertEquals("", tuple.v1().getLocalClusterAlias()); assertThat(tuple.v2(), instanceOf(TransportSearchAction.CCSActionListener.class)); - tuple.v2().onResponse(emptySearchResponse()); + resolveWithEmptySearchResponse(tuple); } awaitLatch(latch, 5, TimeUnit.SECONDS); SearchResponse searchResponse = response.get(); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); - assertEquals(totalClusters, searchResponse.getClusters().getTotal()); - assertEquals(totalClusters, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); - assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); - assertEquals(totalClusters == 1 ? 
1 : totalClusters + 1, searchResponse.getNumReducePhases()); + try { + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED)); + assertEquals(totalClusters, searchResponse.getClusters().getTotal()); + assertEquals( + totalClusters, + searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.SUCCESSFUL) + ); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.RUNNING)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL)); + assertEquals(0, searchResponse.getClusters().getClusterStateCount(SearchResponse.Cluster.Status.FAILED)); + assertEquals(totalClusters == 1 ? 1 : totalClusters + 1, searchResponse.getNumReducePhases()); + } finally { + searchResponse.decRef(); + } }); assertEquals(0, service.getConnectionManager().size()); } finally { @@ -841,6 +868,15 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti } } + private static void resolveWithEmptySearchResponse(Tuple> tuple) { + var resp = emptySearchResponse(); + try { + tuple.v2().onResponse(resp); + } finally { + resp.decRef(); + } + } + public void testCollectSearchShards() throws Exception { int numClusters = randomIntBetween(2, 10); DiscoveryNode[] nodes = new DiscoveryNode[numClusters]; diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index a0eff567274dc..6a87c0f704600 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import 
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesRequestCache; @@ -18,7 +17,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -42,23 +41,23 @@ public void testQueryRewriteOnRefresh() throws Exception { // Search for a range and check that it missed the cache (since its the // first time it has run) - final SearchResponse r1 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r1); - assertThat(r1.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(0, 1); // Search again and check it hits the cache - final SearchResponse r2 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r2); - assertThat(r2.getHits().getTotalHits().value, equalTo(3L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + ); assertRequestCacheStats(1, 1); // Index 
some more documents in the query range and refresh @@ -67,13 +66,13 @@ public void testQueryRewriteOnRefresh() throws Exception { refreshIndex(); // Search again and check the request cache for another miss since request cache should be invalidated by refresh - final SearchResponse r3 = client().prepareSearch("index") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) - .get(); - assertNoFailures(r3); - assertThat(r3.getHits().getTotalHits().value, equalTo(5L)); + assertNoFailuresAndResponse( + client().prepareSearch("index") + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), + r3 -> assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java index 2c4c620c057b2..143aab4e58c78 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; @@ -42,7 +41,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -204,30 +205,34 @@ public void testCardinalityAggregation() throws IOException { assertNoFailures(bulkResponse); // Test the root flattened field. - SearchResponse response = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")) - .get(); - - assertNoFailures(response); - Cardinality count = response.getAggregations().get("cardinality"); - assertCardinality(count, numDocs, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")), + response -> { + Cardinality count = response.getAggregations().get("cardinality"); + assertCardinality(count, numDocs, precisionThreshold); + } + ); // Test two keyed flattened fields. 
- SearchResponse firstResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")) - .get(); - assertNoFailures(firstResponse); - - Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); - assertCardinality(firstCount, numDocs, precisionThreshold); - - SearchResponse secondResponse = client().prepareSearch("test") - .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")) - .get(); - assertNoFailures(secondResponse); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")), + firstResponse -> { + + Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); + assertCardinality(firstCount, numDocs, precisionThreshold); + } + ); - Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); - assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + assertNoFailuresAndResponse( + client().prepareSearch("test") + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")), + secondResponse -> { + Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); + assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); + } + ); } private void assertCardinality(Cardinality count, long value, int precisionThreshold) { @@ -262,60 +267,56 @@ public void testTermsAggregation() throws IOException { // Aggregate on the root 'labels' field. 
TermsAggregationBuilder builder = createTermsAgg("labels"); - SearchResponse response = client().prepareSearch("test").addAggregation(builder).get(); - assertNoFailures(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(builder), response -> { + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(6)); - Terms.Bucket bucket1 = terms.getBuckets().get(0); - assertEquals("urgent", bucket1.getKey()); - assertEquals(5, bucket1.getDocCount()); + Terms.Bucket bucket1 = terms.getBuckets().get(0); + assertEquals("urgent", bucket1.getKey()); + assertEquals(5, bucket1.getDocCount()); - Terms.Bucket bucket2 = terms.getBuckets().get(1); - assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket2.getDocCount()); + Terms.Bucket bucket2 = terms.getBuckets().get(1); + assertThat(bucket2.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket2.getDocCount()); + }); // Aggregate on the 'priority' subfield. 
TermsAggregationBuilder priorityAgg = createTermsAgg("labels.priority"); - SearchResponse priorityResponse = client().prepareSearch("test").addAggregation(priorityAgg).get(); - assertNoFailures(priorityResponse); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(priorityAgg), priorityResponse -> { + Terms priorityTerms = priorityResponse.getAggregations().get("terms"); + assertThat(priorityTerms, notNullValue()); + assertThat(priorityTerms.getName(), equalTo("terms")); + assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - Terms priorityTerms = priorityResponse.getAggregations().get("terms"); - assertThat(priorityTerms, notNullValue()); - assertThat(priorityTerms.getName(), equalTo("terms")); - assertThat(priorityTerms.getBuckets().size(), equalTo(1)); - - Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); - assertEquals("urgent", priorityBucket.getKey()); - assertEquals(5, priorityBucket.getDocCount()); + Terms.Bucket priorityBucket = priorityTerms.getBuckets().get(0); + assertEquals("urgent", priorityBucket.getKey()); + assertEquals(5, priorityBucket.getDocCount()); + }); // Aggregate on the 'release' subfield. 
TermsAggregationBuilder releaseAgg = createTermsAgg("labels.release"); - SearchResponse releaseResponse = client().prepareSearch("test").addAggregation(releaseAgg).get(); - assertNoFailures(releaseResponse); - - Terms releaseTerms = releaseResponse.getAggregations().get("terms"); - assertThat(releaseTerms, notNullValue()); - assertThat(releaseTerms.getName(), equalTo("terms")); - assertThat(releaseTerms.getBuckets().size(), equalTo(5)); - - for (Terms.Bucket bucket : releaseTerms.getBuckets()) { - assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); - assertEquals(1, bucket.getDocCount()); - } + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(releaseAgg), releaseResponse -> { + Terms releaseTerms = releaseResponse.getAggregations().get("terms"); + assertThat(releaseTerms, notNullValue()); + assertThat(releaseTerms.getName(), equalTo("terms")); + assertThat(releaseTerms.getBuckets().size(), equalTo(5)); + + for (Terms.Bucket bucket : releaseTerms.getBuckets()) { + assertThat(bucket.getKeyAsString(), startsWith("v1.2.")); + assertEquals(1, bucket.getDocCount()); + } + }); // Aggregate on the 'priority' subfield with a min_doc_count of 0. 
TermsAggregationBuilder minDocCountAgg = createTermsAgg("labels.priority").minDocCount(0); - SearchResponse minDocCountResponse = client().prepareSearch("test").addAggregation(minDocCountAgg).get(); - assertNoFailures(minDocCountResponse); - - Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); - assertThat(minDocCountTerms, notNullValue()); - assertThat(minDocCountTerms.getName(), equalTo("terms")); - assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + assertNoFailuresAndResponse(client().prepareSearch("test").addAggregation(minDocCountAgg), minDocCountResponse -> { + Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); + assertThat(minDocCountTerms, notNullValue()); + assertThat(minDocCountTerms.getName(), equalTo("terms")); + assertThat(minDocCountTerms.getBuckets().size(), equalTo(1)); + }); } private TermsAggregationBuilder createTermsAgg(String field) { @@ -339,19 +340,22 @@ public void testLoadDocValuesFields() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key").get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertNoFailuresAndResponse( + client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key"), + response -> { + assertHitCount(response, 1); - Map fields = response.getHits().getAt(0).getFields(); + Map fields = response.getHits().getAt(0).getFields(); - DocumentField field = fields.get("flattened"); - assertEquals("flattened", field.getName()); - assertEquals(Arrays.asList("other_value", "value"), field.getValues()); + DocumentField field = fields.get("flattened"); + assertEquals("flattened", field.getName()); + assertEquals(Arrays.asList("other_value", "value"), field.getValues()); - DocumentField keyedField = fields.get("flattened.key"); - assertEquals("flattened.key", keyedField.getName()); - assertEquals("value", keyedField.getValue()); 
+ DocumentField keyedField = fields.get("flattened.key"); + assertEquals("flattened.key", keyedField.getName()); + assertEquals("value", keyedField.getValue()); + } + ); } public void testFieldSort() throws Exception { @@ -386,20 +390,22 @@ public void testFieldSort() throws Exception { .setSource(XContentFactory.jsonBuilder().startObject().startObject("flattened").field("other_key", "E").endObject().endObject()) .get(); - SearchResponse response = client().prepareSearch("test").addSort("flattened", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "1", "2"); - - response = client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "2", "1", "3"); - - response = client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")).get(); - assertNoFailures(response); - assertHitCount(response, 3); - assertOrderedSearchHits(response, "3", "2", "1"); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "1", "2"); + }); + assertNoFailuresAndResponse(client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC), response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "2", "1", "3"); + }); + + assertNoFailuresAndResponse( + client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")), + response -> { + assertHitCount(response, 3); + assertOrderedSearchHits(response, "3", "2", "1"); + } + ); } public void testSourceFiltering() { @@ -410,23 +416,32 @@ public void testSourceFiltering() { prepareIndex("test").setId("1").setRefreshPolicy(RefreshPolicy.IMMEDIATE).setSource(source).get(); - SearchResponse response = 
client().prepareSearch("test").setFetchSource(true).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); + assertResponse( + client().prepareSearch("test").setFetchSource(true), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) + ); // Check 'include' filtering. - response = client().prepareSearch("test").setFetchSource("headers", null).get(); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)); - - response = client().prepareSearch("test").setFetchSource("headers.content-type", null).get(); - Map filteredSource = Collections.singletonMap( - "headers", - Collections.singletonMap("content-type", "application/json") + assertResponse( + client().prepareSearch("test").setFetchSource("headers", null), + response -> assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(source)) ); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + + assertResponse(client().prepareSearch("test").setFetchSource("headers.content-type", null), response -> { + Map filteredSource = Collections.singletonMap( + "headers", + Collections.singletonMap("content-type", "application/json") + ); + assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + }); // Check 'exclude' filtering. 
- response = client().prepareSearch("test").setFetchSource(null, "headers.content-type").get(); - filteredSource = Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co")); - assertThat(response.getHits().getAt(0).getSourceAsMap(), equalTo(filteredSource)); + assertResponse( + client().prepareSearch("test").setFetchSource(null, "headers.content-type"), + response -> assertThat( + response.getHits().getAt(0).getSourceAsMap(), + equalTo(Collections.singletonMap("headers", Collections.singletonMap("origin", "https://www.elastic.co"))) + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 270ab3003a1f1..aa787e6343654 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -137,6 +137,7 @@ import static org.elasticsearch.search.SearchService.SEARCH_WORKER_THREADS_ENABLED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; @@ -241,8 +242,10 @@ protected Settings nodeSettings() { public void testClearOnClose() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> 
assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -253,8 +256,10 @@ public void testClearOnClose() { public void testClearOnStop() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -265,8 +270,10 @@ public void testClearOnStop() { public void testClearIndexDelete() { createIndex("index"); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -479,8 +486,10 @@ public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws Executi public void testBeforeShardLockDuringShardCreate() { IndexService indexService = createIndex("index", Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).build()); prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), 
is(notNullValue())); + assertResponse( + client().prepareSearch("index").setSize(1).setScroll("1m"), + searchResponse -> assertThat(searchResponse.getScrollId(), is(notNullValue())) + ); SearchService service = getInstanceFromNode(SearchService.class); assertEquals(1, service.getActiveContexts()); @@ -787,9 +796,9 @@ public void testMaxOpenScrollContexts() throws Exception { LinkedList clearScrollIds = new LinkedList<>(); for (int i = 0; i < SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY); i++) { - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); - - if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + assertResponse(client().prepareSearch("index").setSize(1).setScroll("1m"), searchResponse -> { + if (randomInt(4) == 0) clearScrollIds.addLast(searchResponse.getScrollId()); + }); } ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); @@ -797,7 +806,7 @@ public void testMaxOpenScrollContexts() throws Exception { client().clearScroll(clearScrollRequest); for (int i = 0; i < clearScrollIds.size(); i++) { - client().prepareSearch("index").setSize(1).setScroll("1m").get(); + client().prepareSearch("index").setSize(1).setScroll("1m").get().decRef(); } final ShardScrollRequestTest request = new ShardScrollRequestTest(indexShard.shardId()); @@ -1433,7 +1442,7 @@ public void testDeleteIndexWhileSearch() throws Exception { latch.countDown(); while (stopped.get() == false) { try { - client().prepareSearch("test").setRequestCache(false).get(); + client().prepareSearch("test").setRequestCache(false).get().decRef(); } catch (Exception ignored) { return; } @@ -1635,20 +1644,27 @@ public void testCancelFetchPhaseEarly() throws Exception { service.setOnCreateSearchContext(c -> searchContextCreated.set(true)); // Test fetch phase is cancelled early - String scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - 
.getScrollId(); + String scrollId; + var searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } - client().searchScroll(new SearchScrollRequest(scrollId)).get(); + client().searchScroll(new SearchScrollRequest(scrollId)).get().decRef(); assertThat(searchContextCreated.get(), is(true)); ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(scrollId); client().clearScroll(clearScrollRequest); - scrollId = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))) - .get() - .getScrollId(); + searchResponse = client().search(searchRequest.allowPartialSearchResults(false).scroll(TimeValue.timeValueMinutes(10))).get(); + try { + scrollId = searchResponse.getScrollId(); + } finally { + searchResponse.decRef(); + } searchContextCreated.set(false); service.setOnCheckCancelled(t -> { SearchShardTask task = new SearchShardTask(randomLong(), "transport", "action", "", TaskId.EMPTY_TASK_ID, emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java index cfa0087731b60..779e0ad28433a 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoPointShapeQueryTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -24,6 +23,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public class GeoPointShapeQueryTests extends BasePointShapeQueryTestCase { @@ -78,8 +78,7 @@ public void testFieldAlias() throws IOException { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", point)), 1); } private final DatelinePointShapeQueryTestCase dateline = new DatelinePointShapeQueryTestCase(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 19f0d1e2e88a0..c5d5ecc1f90e8 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -766,16 +766,22 @@ public void testConcurrentSnapshotRestoreAndDeleteOther() { assertEquals(shards, restoreSnapshotResponse.getRestoreInfo().totalShards()); client().search( new SearchRequest("restored_" + index).source(new SearchSourceBuilder().size(0).trackTotalHits(true)), - searchResponseListener + searchResponseListener.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + }) ); }); deterministicTaskQueue.runAllRunnableTasks(); - assertEquals( - documentsFirstSnapshot + documentsSecondSnapshot, - Objects.requireNonNull(safeResult(searchResponseListener).getHits().getTotalHits()).value - ); + var response = safeResult(searchResponseListener); + try { + assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + } finally { + response.decRef(); + } + assertThat(safeResult(deleteSnapshotStepListener).isAcknowledged(), 
is(true)); assertThat(safeResult(restoreSnapshotResponseListener).getRestoreInfo().failedShards(), is(0)); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index a2499c06d6ccc..71030358e901f 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -333,7 +333,13 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t logger.info("--> add random documents to {}", index); addRandomDocuments(index, randomIntBetween(10, 1000)); } else { - int docCount = (int) prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(index).setSize(0).get(); + final int docCount; + try { + docCount = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } int deleteCount = randomIntBetween(1, docCount); logger.info("--> delete {} random documents from {}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { @@ -403,7 +409,12 @@ public void testMultipleSnapshotAndRollback() throws Exception { addRandomDocuments(indexName, docCount); } // Check number of documents in this iteration - docCounts[i] = (int) prepareSearch(indexName).setSize(0).get().getHits().getTotalHits().value; + var resp = prepareSearch(indexName).setSize(0).get(); + try { + docCounts[i] = (int) resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); assertSuccessfulSnapshot( clusterAdmin().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName) diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 3f394c1384432..cef8d555b111d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -147,7 +147,8 @@ public void testIndexPointsCircle() throws Exception { try { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) - .get(); + .get() + .decRef(); } catch (Exception e) { assertThat( e.getCause().getMessage(), diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java index 58328671c58e8..cae57d5137acf 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java @@ -262,7 +262,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept // Set search.allow_expensive_queries to "null" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); - assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(builder, 1); // Set search.allow_expensive_queries to "true" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); From 143f4208d1bb6942a5be7054d074be970390523e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 4 Dec 2023 19:09:48 +0100 Subject: [PATCH 189/263] Fix remaining leaked SearchResponse issues in :server:integTests (#102896) This should be the last round for this module, found these using a prototype that has `SearchResponse` ref-counted already. 
--- .../join/query/ChildQuerySearchIT.java | 1 - .../action/search/LookupRuntimeFieldIT.java | 161 +++++----- .../action/search/PointInTimeIT.java | 239 +++++++------- .../action/search/SearchShardsIT.java | 33 +- .../action/search/TransportSearchIT.java | 99 +++--- .../master/IndexingMasterFailoverIT.java | 4 +- .../cluster/MinimumMasterNodesIT.java | 5 +- .../allocation/FilteringAllocationIT.java | 7 +- .../ClusterDisruptionCleanSettingsIT.java | 4 +- .../index/shard/SearchIdleIT.java | 9 +- .../indices/IndicesOptionsIntegrationIT.java | 6 +- .../memory/breaker/CircuitBreakerNoopIT.java | 4 +- .../breaker/CircuitBreakerServiceIT.java | 2 +- .../RandomExceptionCircuitBreakerIT.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 2 +- .../state/CloseWhileRelocatingShardsIT.java | 31 +- .../indices/stats/IndexStatsIT.java | 60 +--- .../elasticsearch/recovery/RelocationIT.java | 56 ++-- .../elasticsearch/routing/AliasRoutingIT.java | 199 +++--------- .../routing/SimpleRoutingIT.java | 104 ++----- .../search/SearchCancellationIT.java | 20 +- .../basic/TransportTwoNodesSearchIT.java | 89 +++--- .../search/ccs/CrossClusterSearchIT.java | 10 +- .../highlight/HighlighterSearchIT.java | 3 +- .../search/functionscore/QueryRescorerIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 9 +- .../search/routing/SearchPreferenceIT.java | 14 +- .../routing/SearchReplicaSelectionIT.java | 2 +- .../search/scroll/DuelScrollIT.java | 172 +++++----- .../search/scroll/SearchScrollIT.java | 294 ++++++++++-------- .../SearchScrollWithFailingNodesIT.java | 47 +-- .../search/searchafter/SearchAfterIT.java | 94 +++--- .../search/slice/SearchSliceIT.java | 101 +++--- .../search/source/MetadataFetchingIT.java | 4 +- .../search/stats/FieldUsageStatsIT.java | 6 +- .../suggest/CompletionSuggestSearchIT.java | 8 +- .../snapshots/ConcurrentSnapshotsIT.java | 3 +- .../AbstractSnapshotIntegTestCase.java | 9 +- .../elasticsearch/test/ESIntegTestCase.java | 21 +- 39 files changed, 950 
insertions(+), 986 deletions(-) diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index e433ce0b60596..ae1adf4160c2a 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -305,7 +305,6 @@ public void testHasParentFilter() throws Exception { constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false)) ).setSize(numChildDocsPerParent), response -> { - assertNoFailures(response); Set childIds = parentToChildrenEntry.getValue(); assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); for (int i = 0; i < response.getHits().getTotalHits().value; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java index f2e0511ffb7ab..7eaed125156e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; public class LookupRuntimeFieldIT extends ESIntegTestCase { @@ -132,90 +134,92 @@ public void populateIndex() throws Exception { } public void testBasic() { - SearchResponse searchResponse = prepareSearch("books").addFetchField("author") - .addFetchField("title") - 
.addSort("published_date", SortOrder.DESC) - .setSize(3) - .get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - ElasticsearchAssertions.assertHitCount(searchResponse, 5); + assertNoFailuresAndResponse( + prepareSearch("books").addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.DESC).setSize(3), + searchResponse -> { + ElasticsearchAssertions.assertHitCount(searchResponse, 5); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); - SearchHit hit2 = searchResponse.getHits().getHits()[2]; - assertThat(hit2.field("title").getValues(), equalTo(List.of("the third book"))); - assertThat( - hit2.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), 
"last_name", List.of("Boston")))) + SearchHit hit2 = searchResponse.getHits().getHits()[2]; + assertThat(hit2.field("title").getValues(), equalTo(List.of("the third book"))); + assertThat( + hit2.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) + ); + } ); } public void testLookupMultipleIndices() throws IOException { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" - { - "publisher": { - "type": "lookup", - "target_index": "publishers", - "input_field": "publisher_id", - "target_field": "_id", - "fetch_fields": ["name", "city"] + assertResponse( + prepareSearch("books").setRuntimeMappings(parseMapping(""" + { + "publisher": { + "type": "lookup", + "target_index": "publishers", + "input_field": "publisher_id", + "target_field": "_id", + "fetch_fields": ["name", "city"] + } } - } - """)) - .setFetchSource(false) - .addFetchField("title") - .addFetchField("author") - .addFetchField("publisher") - .addSort("published_date", SortOrder.DESC) - .setSize(2) - .get(); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); - assertThat( - hit0.field("author").getValues(), - equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")))) - ); - assertThat( - hit0.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) - ); + """)) + .setFetchSource(false) + .addFetchField("title") + .addFetchField("author") + .addFetchField("publisher") + .addSort("published_date", SortOrder.DESC) + .setSize(2), + searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + assertThat(hit0.field("title").getValues(), equalTo(List.of("the fifth book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("Mike"), "last_name", 
List.of("Boston")))) + ); + assertThat( + hit0.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The second publisher"), "city", List.of("Toronto")))) + ); - SearchHit hit1 = searchResponse.getHits().getHits()[1]; - assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); - assertThat( - hit1.field("author").getValues(), - equalTo( - List.of( - Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), - Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) - ) - ) - ); - assertThat( - hit1.field("publisher").getValues(), - equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + SearchHit hit1 = searchResponse.getHits().getHits()[1]; + assertThat(hit1.field("title").getValues(), equalTo(List.of("the forth book"))); + assertThat( + hit1.field("author").getValues(), + equalTo( + List.of( + Map.of("first_name", List.of("Mike"), "last_name", List.of("Boston")), + Map.of("first_name", List.of("Jack"), "last_name", List.of("Austin")) + ) + ) + ); + assertThat( + hit1.field("publisher").getValues(), + equalTo(List.of(Map.of("name", List.of("The first publisher"), "city", List.of("Montreal", "Vancouver")))) + ); + } ); } public void testFetchField() throws Exception { - SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" + assertNoFailuresAndResponse(prepareSearch("books").setRuntimeMappings(parseMapping(""" { "author": { "type": "lookup", @@ -225,12 +229,15 @@ public void testFetchField() throws Exception { "fetch_fields": ["first_name", {"field": "joined", "format": "MM/yyyy"}] } } - """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1).get(); - ElasticsearchAssertions.assertNoFailures(searchResponse); - SearchHit hit0 = searchResponse.getHits().getHits()[0]; - // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" - 
assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); - assertThat(hit0.field("author").getValues(), equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020"))))); + """)).addFetchField("author").addFetchField("title").addSort("published_date", SortOrder.ASC).setSize(1), searchResponse -> { + SearchHit hit0 = searchResponse.getHits().getHits()[0]; + // "author", "john", "first_name", "John", "last_name", "New York", "joined", "2020-03-01" + assertThat(hit0.field("title").getValues(), equalTo(List.of("the first book"))); + assertThat( + hit0.field("author").getValues(), + equalTo(List.of(Map.of("first_name", List.of("John"), "joined", List.of("03/2020")))) + ); + }); } private Map parseMapping(String mapping) throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index d3e312e173c29..21bbd32e6bf26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -49,7 +49,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.empty; @@ -83,9 +83,10 @@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, 
TimeValue.timeValueMinutes(2)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertThat(resp1.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp1, numDocs); + assertResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { + assertThat(resp1.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp1, numDocs); + }); int deletedDocs = 0; for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { @@ -96,18 +97,20 @@ public void testBasic() { } refresh("test"); if (randomBoolean()) { - SearchResponse resp2 = prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); - assertNoFailures(resp2); - assertHitCount(resp2, numDocs - deletedDocs); + final int delDocCount = deletedDocs; + assertNoFailuresAndResponse( + prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()), + resp2 -> assertHitCount(resp2, numDocs - delDocCount) + ); } try { - SearchResponse resp3 = prepareSearch().setPreference(null) - .setQuery(new MatchAllQueryBuilder()) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertNoFailures(resp3); - assertHitCount(resp3, numDocs); - assertThat(resp3.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), + resp3 -> { + assertHitCount(resp3, numDocs); + assertThat(resp3.pointInTimeId(), equalTo(pitId)); + } + ); } finally { closePointInTime(pitId); } @@ -127,27 +130,24 @@ public void testMultipleIndices() { refresh(); String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), 
equalTo(pitId)); int moreDocs = randomIntBetween(10, 50); - for (int i = 0; i < moreDocs; i++) { - String id = "more-" + i; - String index = "index-" + randomIntBetween(1, numIndices); - prepareIndex(index).setId(id).setSource("value", i).get(); - } - refresh(); - resp = prepareSearch().get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs + moreDocs); - - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertNotNull(resp.pointInTimeId()); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + for (int i = 0; i < moreDocs; i++) { + String id = "more-" + i; + String index = "index-" + randomIntBetween(1, numIndices); + prepareIndex(index).setId(id).setSource("value", i).get(); + } + refresh(); + }); + assertNoFailuresAndResponse(prepareSearch(), resp -> assertHitCount(resp, numDocs + moreDocs)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertNotNull(resp.pointInTimeId()); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -187,8 +187,7 @@ public void testIndexFilter() { String[] actualIndices = searchContextId.getActualIndices(); assertEquals(1, actualIndices.length); assertEquals("index-3", actualIndices[0]); - assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), resp -> { - assertNoFailures(resp); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(50), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); 
assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -213,10 +212,10 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); final Set dataNodes = clusterService().state() .nodes() .getDataNodes() @@ -233,10 +232,10 @@ public void testRelocation() throws Exception { } refresh(); } - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); assertBusy(() -> { final Set assignedNodes = clusterService().state() .routingTable() @@ -246,10 +245,10 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); } finally { closePointInTime(pitId); } @@ -264,17 +263,21 
@@ public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp1); - assertHitCount(resp1, index1); - if (rarely()) { - assertBusy(() -> { - final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); - assertThat(stats.search.getOpenContexts(), equalTo(0L)); - }, 60, TimeUnit.SECONDS); - } else { - closePointInTime(resp1.pointInTimeId()); - } + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { + assertHitCount(resp1, index1); + if (rarely()) { + try { + assertBusy(() -> { + final CommonStats stats = indicesAdmin().prepareStats().setSearch(true).get().getTotal(); + assertThat(stats.search.getOpenContexts(), equalTo(0L)); + }, 60, TimeUnit.SECONDS); + } catch (Exception e) { + throw new AssertionError(e); + } + } else { + closePointInTime(resp1.pointInTimeId()); + } + }); SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() @@ -302,20 +305,23 @@ public void testIndexNotFound() { refresh(); String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertNoFailures(resp); - assertHitCount(resp, index1 + index2); + assertNoFailuresAndResponse( + prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), + resp -> assertHitCount(resp, index1 + index2) + ); indicesAdmin().prepareDelete("index-1").get(); if (randomBoolean()) { - resp = prepareSearch("index-*").get(); - assertNoFailures(resp); - assertHitCount(resp, index2); + 
assertNoFailuresAndResponse(prepareSearch("index-*"), resp -> assertHitCount(resp, index2)); } // Allow partial search result - resp = prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)).get(); - assertFailures(resp); - assertHitCount(resp, index2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), + resp -> { + assertFailures(resp); + assertHitCount(resp, index2); + } + ); // Do not allow partial search result expectThrows( @@ -356,14 +362,15 @@ public void testCanMatch() throws Exception { } } prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); - SearchResponse resp = prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) - .setPreFilterShardSize(randomIntBetween(2, 3)) - .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertThat(resp.getHits().getHits(), arrayWithSize(0)); + assertResponse( + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setPreference(null) + .setPreFilterShardSize(randomIntBetween(2, 3)) + .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) + .setPointInTime(new PointInTimeBuilder(pitId)), + resp -> assertThat(resp.getHits().getHits(), arrayWithSize(0)) + ); for (String node : internalCluster().nodesInclude("test")) { for (IndexService indexService : internalCluster().getInstance(IndicesService.class, node)) { for (IndexShard indexShard : indexService) { @@ -415,19 +422,20 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = 
prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); - assertNoFailures(resp); - assertHitCount(resp, numDocs1 + numDocs2); - assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertHitCount(resp, numDocs1 + numDocs2); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + }); internalCluster().restartNode(assignedNodeForIndex1); - resp = prepareSearch().setPreference(null) - .setAllowPartialSearchResults(true) - .setPointInTime(new PointInTimeBuilder(pitId)) - .get(); - assertFailures(resp); - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, numDocs2); + assertResponse( + prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), + resp -> { + assertFailures(resp); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, numDocs2); + } + ); } finally { closePointInTime(pitId); } @@ -547,40 +555,45 @@ private void assertPagination(PointInTimeBuilder pit, int expectedNumDocs, int s reverseMuls[i] = expectedSorts.get(i).order() == SortOrder.ASC ? 
1 : -1; } SearchResponse response = client().search(searchRequest).get(); - Object[] lastSortValues = null; - while (response.getHits().getHits().length > 0) { - Object[] lastHitSortValues = null; - for (SearchHit hit : response.getHits().getHits()) { - assertTrue(seen.add(hit.getIndex() + hit.getId())); - - if (lastHitSortValues != null) { + try { + Object[] lastSortValues = null; + while (response.getHits().getHits().length > 0) { + Object[] lastHitSortValues = null; + for (SearchHit hit : response.getHits().getHits()) { + assertTrue(seen.add(hit.getIndex() + hit.getId())); + + if (lastHitSortValues != null) { + for (int i = 0; i < expectedSorts.size(); i++) { + Comparable value = (Comparable) hit.getRawSortValues()[i]; + int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + if (cmp != 0) { + assertThat(cmp, equalTo(1)); + break; + } + } + } + lastHitSortValues = hit.getRawSortValues(); + } + int len = response.getHits().getHits().length; + SearchHit last = response.getHits().getHits()[len - 1]; + if (lastSortValues != null) { for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) hit.getRawSortValues()[i]; - int cmp = value.compareTo(lastHitSortValues[i]) * reverseMuls[i]; + Comparable value = (Comparable) last.getSortValues()[i]; + int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; if (cmp != 0) { assertThat(cmp, equalTo(1)); break; } } } - lastHitSortValues = hit.getRawSortValues(); - } - int len = response.getHits().getHits().length; - SearchHit last = response.getHits().getHits()[len - 1]; - if (lastSortValues != null) { - for (int i = 0; i < expectedSorts.size(); i++) { - Comparable value = (Comparable) last.getSortValues()[i]; - int cmp = value.compareTo(lastSortValues[i]) * reverseMuls[i]; - if (cmp != 0) { - assertThat(cmp, equalTo(1)); - break; - } - } + assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); + lastSortValues = last.getSortValues(); + 
searchRequest.source().searchAfter(last.getSortValues()); + response.decRef(); + response = client().search(searchRequest).get(); } - assertThat(last.getSortValues().length, equalTo(expectedSorts.size())); - lastSortValues = last.getSortValues(); - searchRequest.source().searchAfter(last.getSortValues()); - response = client().search(searchRequest).get(); + } finally { + response.decRef(); } assertThat(seen.size(), equalTo(expectedNumDocs)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java index 8b1acf11a7a5d..7da015052fe82 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/SearchShardsIT.java @@ -24,7 +24,9 @@ import java.util.Collection; import java.util.Queue; +import java.util.concurrent.ExecutionException; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -105,7 +107,7 @@ public void testBasic() { } } - public void testRandom() { + public void testRandom() throws ExecutionException, InterruptedException { int numIndices = randomIntBetween(1, 10); for (int i = 0; i < numIndices; i++) { String index = "index-" + i; @@ -127,21 +129,22 @@ public void testRandom() { RangeQueryBuilder rangeQuery = new RangeQueryBuilder("value").from(from).to(to).includeUpper(true).includeLower(true); SearchRequest searchRequest = new SearchRequest().indices("index-*").source(new SearchSourceBuilder().query(rangeQuery)); searchRequest.setPreFilterShardSize(1); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - var searchShardsRequest = new SearchShardsRequest( - new String[] { "index-*" }, - 
SearchRequest.DEFAULT_INDICES_OPTIONS, - rangeQuery, - null, - preference, - randomBoolean(), - randomBoolean() ? null : randomAlphaOfLength(10) - ); - var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); + assertResponse(client().search(searchRequest), searchResponse -> { + var searchShardsRequest = new SearchShardsRequest( + new String[] { "index-*" }, + SearchRequest.DEFAULT_INDICES_OPTIONS, + rangeQuery, + null, + preference, + randomBoolean(), + randomBoolean() ? null : randomAlphaOfLength(10) + ); + var searchShardsResponse = client().execute(TransportSearchShardsAction.TYPE, searchShardsRequest).actionGet(); - assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); - long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); - assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + assertThat(searchShardsResponse.getGroups(), hasSize(searchResponse.getTotalShards())); + long skippedShards = searchShardsResponse.getGroups().stream().filter(SearchShardsGroup::skipped).count(); + assertThat(skippedShards, equalTo((long) searchResponse.getSkippedShards())); + }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index 31ffe560be010..5bb21dc874747 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -74,6 +74,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -121,7 +122,7 @@ protected Collection> nodePlugins() { return Collections.singletonList(TestPlugin.class); } - public void testLocalClusterAlias() { + public void testLocalClusterAlias() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); IndexRequest indexRequest = new IndexRequest("test"); indexRequest.id("1"); @@ -140,14 +141,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("local", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("local", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -158,14 +160,15 @@ public void testLocalClusterAlias() { nowInMillis, randomBoolean() ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - SearchHit[] hits = searchResponse.getHits().getHits(); - assertEquals(1, hits.length); - SearchHit hit = hits[0]; - assertEquals("", hit.getClusterAlias()); - assertEquals("test", hit.getIndex()); - assertEquals("1", hit.getId()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + SearchHit[] hits = 
searchResponse.getHits().getHits(); + assertEquals(1, hits.length); + SearchHit hit = hits[0]; + assertEquals("", hit.getClusterAlias()); + assertEquals("test", hit.getIndex()); + assertEquals("1", hit.getId()); + }); } } @@ -193,8 +196,7 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce { SearchRequest searchRequest = new SearchRequest(""); searchRequest.indicesOptions(IndicesOptions.fromOptions(true, true, true, true)); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(0, searchResponse.getTotalShards()); + assertResponse(client().search(searchRequest), searchResponse -> assertEquals(0, searchResponse.getTotalShards())); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -217,9 +219,10 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce randomBoolean() ); searchRequest.indices(""); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -236,13 +239,14 @@ public void testAbsoluteStartMillis() throws ExecutionException, InterruptedExce rangeQuery.lt("1982-01-01"); sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(1, 
searchResponse.getHits().getTotalHits().value); + assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); + }); } } - public void testFinalReduce() { + public void testFinalReduce() throws ExecutionException, InterruptedException { long nowInMillis = randomLongBetween(0, Long.MAX_VALUE); TaskId taskId = new TaskId("node", randomNonNegativeLong()); { @@ -274,11 +278,12 @@ public void testFinalReduce() { SearchRequest searchRequest = randomBoolean() ? originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(1, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(1, longTerms.getBuckets().size()); + }); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -289,11 +294,12 @@ public void testFinalReduce() { nowInMillis, false ); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); - Aggregations aggregations = searchResponse.getAggregations(); - LongTerms longTerms = aggregations.get("terms"); - assertEquals(2, longTerms.getBuckets().size()); + assertResponse(client().search(searchRequest), searchResponse -> { + assertEquals(2, searchResponse.getHits().getTotalHits().value); + Aggregations aggregations = searchResponse.getAggregations(); + LongTerms longTerms = aggregations.get("terms"); + assertEquals(2, longTerms.getBuckets().size()); + }); } } @@ -309,7 +315,7 
@@ public void testWaitForRefreshIndexValidation() throws Exception { Arrays.fill(validCheckpoints, SequenceNumbers.UNASSIGNED_SEQ_NO); // no exception - prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get(); + prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get().decRef(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -373,7 +379,7 @@ public void testShardCountLimit() throws Exception { assertAcked(prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numPrimaries2))); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1)); @@ -386,7 +392,7 @@ public void testShardCountLimit() throws Exception { updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1)); // no exception - prepareSearch("test1").get(); + prepareSearch("test1").get().decRef(); e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1", "test2").get()); assertThat( @@ -422,12 +428,13 @@ public void testSearchIdle() throws Exception { prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); - assertBusy(() -> { - SearchResponse resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) - .setPreFilterShardSize(randomIntBetween(1, 3)) - .get(); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); - }); + assertBusy( + () -> assertResponse( + prepareSearch("test").setQuery(new 
RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setPreFilterShardSize(randomIntBetween(1, 3)), + resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + ) + ); } public void testCircuitBreakerReduceFail() throws Exception { @@ -471,7 +478,7 @@ public void onFailure(Exception e) { assertBusy(() -> { Exception exc = expectThrows( Exception.class, - () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get() + () -> client.prepareSearch("test").addAggregation(new TestAggregationBuilder("test")).get().decRef() ); assertThat(exc.getCause().getMessage(), containsString("")); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 837c55e81b471..1887e37cbbf47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -20,7 +20,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) public class IndexingMasterFailoverIT extends ESIntegTestCase { @@ -97,7 +97,7 @@ public void run() { ensureGreen("myindex"); refresh(); - assertThat(prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L)); + assertHitCount(prepareSearch("myindex"), 10); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 
746ddc56870ae..09c14df3566af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -105,10 +105,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we get the data back"); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(100L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } String masterNode = internalCluster().getMasterName(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java index 04fba1f46074f..33719df372fb1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java @@ -31,6 +31,7 @@ import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @@ -51,7 +52,7 @@ public void testDecommissionNodeNoReplicas() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { @@ -79,7 +80,7 @@ public void testDecommissionNodeNoReplicas() { } 
indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } public void testAutoExpandReplicasToFilteredNodes() { @@ -132,7 +133,7 @@ public void testDisablingAllocationFiltering() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get(); } indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); final boolean closed = randomBoolean(); if (closed) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java index 5ea78a6b1e3a0..e8234fb09512b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -23,7 +23,7 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class ClusterDisruptionCleanSettingsIT extends ESIntegTestCase { @@ -63,6 +63,6 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep IndicesStoreIntegrationIT.relocateAndBlockCompletion(logger, "test", 0, node_1, node_2); // now search for the documents and see if we get a reply - 
assertThat(prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch().setSize(0), 100); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index e1ab2bdc2369e..1a8f928d9c10f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -48,7 +48,14 @@ public class SearchIdleIT extends ESSingleNodeTestCase { public void testAutomaticRefreshSearch() throws InterruptedException { - runTestAutomaticRefresh(numDocs -> client().prepareSearch("test").get().getHits().getTotalHits().value); + runTestAutomaticRefresh(numDocs -> { + var resp = client().prepareSearch("test").get(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } + }); } public void testAutomaticRefreshGet() throws InterruptedException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index ce3fd98476725..658b9eadd772f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -408,7 +408,7 @@ public void testAllMissingStrict() throws Exception { expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2", "test3").setQuery(matchAllQuery()).get()); // you should still be able to run empty searches without things blowing up - prepareSearch().setQuery(matchAllQuery()).get(); + prepareSearch().setQuery(matchAllQuery()).get().decRef(); } // For now don't handle closed indices @@ -681,7 +681,7 @@ private static void verify(ActionRequestBuilder 
requestBuilder, boolean fa }); } else { try { - requestBuilder.get(); + requestBuilder.get().decRef(); fail("IndexNotFoundException or IndexClosedException was expected"); } catch (IndexNotFoundException | IndexClosedException e) {} } @@ -694,7 +694,7 @@ private static void verify(ActionRequestBuilder requestBuilder, boolean fa assertThat(response.getResponses()[0].getResponse(), notNullValue()); }); } else { - requestBuilder.get(); + requestBuilder.get().decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java index f9f17d8e1ebbf..dd29823f8076f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerNoopIT.java @@ -51,7 +51,7 @@ public void testNoopRequestBreaker() throws Exception { indexRandom(true, reqs); // A cardinality aggregation uses BigArrays and thus the REQUEST breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addAggregation(cardinality("card").field("test")).get().decRef(); // no exception because the breaker is a noop } @@ -68,7 +68,7 @@ public void testNoopFielddataBreaker() throws Exception { indexRandom(true, reqs); // Sorting using fielddata and thus the FIELDDATA breaker - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // no exception because the breaker is a noop } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index e726c8a08002a..705fb879e9125 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -193,7 +193,7 @@ public void testRamAccountingTermsEnum() throws Exception { indexRandom(true, false, true, reqs); // execute a search that loads field data (sorting on the "test" field) - client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + client.prepareSearch("ramtest").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get().decRef(); // clear field data cache (thus setting the loaded field data back to 0) clearFieldData(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 6a52159c71ab9..2935efb4808a7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -159,7 +159,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc boolean success = false; try { // Sort by the string and numeric fields, to load them into field data - searchRequestBuilder.get(); + searchRequestBuilder.get().decRef(); success = true; } catch (SearchPhaseExecutionException ex) { logger.info("expected SearchPhaseException: [{}]", ex.getMessage()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 
762bbdda77df1..2cbc3477cb49d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -933,7 +933,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, indexRandom(true, docs); flush(); - assertThat(prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs)); + assertHitCount(prepareSearch(name).setSize(0), numDocs); return indicesAdmin().prepareStats(name).get(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index 77d38410d1ea9..b66a0b0f3be44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -48,6 +48,7 @@ import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsClosed; import static org.elasticsearch.indices.state.CloseIndexIT.assertIndexIsOpened; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -241,20 +242,22 @@ public void testCloseWhileRelocatingShards() throws Exception { ensureGreen(indices); for (String index : acknowledgedCloses) { - long docsCount = prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value; - assertEquals( - "Expected " - + docsPerIndex.get(index) - + " docs in index " - + index - + " but got " - + docsCount - + " (close acknowledged=" - + acknowledgedCloses.contains(index) - + ")", - (long) docsPerIndex.get(index), - 
docsCount - ); + assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { + long docsCount = response.getHits().getTotalHits().value; + assertEquals( + "Expected " + + docsPerIndex.get(index) + + " docs in index " + + index + + " but got " + + docsCount + + " (close acknowledged=" + + acknowledgedCloses.contains(index) + + ")", + (long) docsPerIndex.get(index), + docsCount + ); + }); } } finally { updateClusterSettings(Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index ec62a1cbbd9bf..a98297e8b49ae 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -82,6 +82,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.emptyCollectionOf; @@ -150,8 +151,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data... 
- prepareSearch().addSort("field", SortOrder.ASC).get(); - prepareSearch().addSort("field", SortOrder.ASC).get(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -166,8 +167,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); // sort to load it to field data... - prepareSearch().addSort("field2", SortOrder.ASC).get(); - prepareSearch().addSort("field2", SortOrder.ASC).get(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); + prepareSearch().addSort("field2", SortOrder.ASC).get().decRef(); // now check the per field stats nodesStats = clusterAdmin().prepareNodesStats("data:true") @@ -264,8 +265,8 @@ public void testClearAllCaches() throws Exception { assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data and filter to load filter cache - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get(); - prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).get().decRef(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).get().decRef(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).get(); assertThat( @@ -355,10 +356,7 @@ public void testQueryCache() throws Exception { assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), equalTo(0L)); assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L)); 
for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -389,10 +387,7 @@ public void testQueryCache() throws Exception { }); for (int i = 0; i < 10; i++) { - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -407,29 +402,13 @@ public void testQueryCache() throws Exception { // test explicit request parameter - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(false) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(false), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -440,24 
+419,13 @@ public void testQueryCache() throws Exception { indicesAdmin().prepareClearCache().setRequestCache(true).get(); // clean the cache updateIndexSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false), "idx"); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0L) ); - assertThat( - prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) - .setSize(0) - .setRequestCache(true) - .get() - .getHits() - .getTotalHits().value, - equalTo((long) numDocs) - ); + assertHitCount(prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).setRequestCache(true), numDocs); assertThat( indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0L) @@ -983,7 +951,7 @@ public void testGroupsParam() throws Exception { prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").get(); refresh(); - prepareSearch("_all").setStats("bar", "baz").get(); + prepareSearch("_all").setStats("bar", "baz").get().decRef(); IndicesStatsRequestBuilder builder = indicesAdmin().prepareStats(); IndicesStatsResponse stats = builder.get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index e53bcb0480d7b..0e14d80aaa0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -43,7 +43,6 @@ import 
org.elasticsearch.indices.recovery.RecoveryFileChunkRequest; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -80,6 +79,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -133,7 +133,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20L); logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); @@ -155,7 +155,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count again..."); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocationWhileIndexingRandom() throws Exception { @@ -229,35 +229,31 @@ public void testRelocationWhileIndexingRandom() throws Exception { logger.info("--> refreshing the index"); indicesAdmin().prepareRefresh("test").get(); logger.info("--> searching the index"); - boolean ranOnce = false; for (int i = 0; i < 10; i++) { + final int idx = i; logger.info("--> 
START search test round {}", i + 1); - SearchHits hits = prepareSearch("test").setQuery(matchAllQuery()) - .setSize((int) indexer.totalIndexedDocs()) - .storedFields() - .get() - .getHits(); - ranOnce = true; - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { - int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; - for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { - hitIds[hit] = hit + 1; - } - Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); - for (SearchHit hit : hits.getHits()) { - int id = Integer.parseInt(hit.getId()); - if (set.remove(id) == false) { - logger.error("Extra id [{}]", id); + assertResponse( + prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), + response -> { + var hits = response.getHits(); + if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; + for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { + hitIds[hit] = hit + 1; + } + Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); + for (SearchHit hit : hits.getHits()) { + int id = Integer.parseInt(hit.getId()); + if (set.remove(id) == false) { + logger.error("Extra id [{}]", id); + } + } + set.forEach(value -> logger.error("Missing id [{}]", value)); } + assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + logger.info("--> DONE search test round {}", idx + 1); } - set.forEach(value -> logger.error("Missing id [{}]", value)); - } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); - logger.info("--> DONE search test round {}", i + 1); - - } - if (ranOnce == false) { - fail(); + ); } } } @@ -570,7 +566,7 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(20L)); + 
assertHitCount(prepareSearch("test").setSize(0), 20); } public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { @@ -636,7 +632,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); }, 1, TimeUnit.MINUTES); - assertThat(prepareSearch("test").setSize(0).get().getHits().getTotalHits().value, equalTo(120L)); + assertHitCount(prepareSearch("test").setSize(0), 120); } public void testRelocationEstablishedPeerRecoveryRetentionLeases() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 442a2dc99bda3..8fb56d17b93ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.xcontent.XContentFactory; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; @@ -116,45 +117,24 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat( - 
prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - - assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> indexing with id [2], and routing [1] using alias"); @@ -162,111 +142,50 @@ public void testAliasSearchRouting() throws 
Exception { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with 0 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 1 routing, should find one"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - 
assertThat(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with 0,1 indexRoutings , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting("0", "1") - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setRouting("0", "1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with two routing aliases , should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) 
- ); - assertThat( - prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias0, alias1 and alias01, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias0", "alias1", "alias01").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias0", "alias1", "alias01").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with test, alias0 and alias1, should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("test", "alias0", "alias1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("test", "alias0", "alias1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -316,43 +235,20 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a1,alias-b0, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a1", 
"alias-b0").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch("alias-a1", "alias-b0").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias-a1", "alias-b0").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with alias-ab, should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-ab").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with alias-a0,alias-b1 should find two"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch("alias-a0", "alias-b1").setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias-a0", "alias-b1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -374,7 +270,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, 
equalTo(2L)); + assertHitCount(prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()), 2); } } @@ -420,11 +316,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying get and search with routing, should find"); for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> creating alias with routing [4]"); @@ -432,11 +325,8 @@ public void testIndexingAliasesOverTime() throws Exception { logger.info("--> verifying search with wrong routing should not find"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> creating alias with search routing [3,4] and index routing 4"); @@ -453,11 +343,8 @@ public void testIndexingAliasesOverTime() throws Exception { for (int i = 0; i < 5; i++) { assertThat(client().prepareGet("test", "0").setRouting("3").get().isExists(), equalTo(true)); assertThat(client().prepareGet("test", "1").setRouting("4").get().isExists(), equalTo(true)); - 
assertThat(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch("alias").setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java index 772d8767b7dd0..f59ec4d42089e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentFactory; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -134,36 +135,19 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 1L); } logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(0L) - ); + 
assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); + assertHitCount(prepareSearch().setSize(0).setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 0); } logger.info("--> search with correct routing, should find"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } String secondRoutingValue = "1"; @@ -176,86 +160,42 @@ public void testSimpleSearchRouting() { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { - assertThat(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L)); - assertThat( - prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(2L) - ); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 2); } logger.info("--> search with {} routing, should find one", routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); 
+ assertHitCount(prepareSearch().setSize(0).setRouting(routingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {} routing, should find one", secondRoutingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, - equalTo(1L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(1L) - ); + assertHitCount(prepareSearch().setRouting("1").setQuery(QueryBuilders.matchAllQuery()), 1); + assertHitCount(prepareSearch().setSize(0).setRouting(secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 1); } logger.info("--> search with {},{} indexRoutings , should find two", routingValue, "1"); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) - ); - assertThat( - prepareSearch().setSize(0) - .setRouting(routingValue, secondRoutingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount(prepareSearch().setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), 2); + assertHitCount( + prepareSearch().setSize(0).setRouting(routingValue, secondRoutingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); } logger.info("--> search with {},{},{} indexRoutings , should find two", routingValue, secondRoutingValue, routingValue); for (int i = 0; i < 5; i++) { - assertThat( - prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + assertHitCount( + prepareSearch().setRouting(routingValue, secondRoutingValue, 
routingValue).setQuery(QueryBuilders.matchAllQuery()), + 2 ); - assertThat( + assertHitCount( prepareSearch().setSize(0) .setRouting(routingValue, secondRoutingValue, routingValue) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, - equalTo(2L) + .setQuery(QueryBuilders.matchAllQuery()), + 2 ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 19dfe598b5318..aaf218e3579be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -169,21 +169,25 @@ public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exceptio logger.info("Executing search"); TimeValue keepAlive = TimeValue.timeValueSeconds(5); + String scrollId; SearchResponse searchResponse = prepareSearch("test").setScroll(keepAlive) .setSize(2) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .get(); + try { + assertNotNull(searchResponse.getScrollId()); - assertNotNull(searchResponse.getScrollId()); + // Enable block so the second request would block + for (ScriptedBlockPlugin plugin : plugins) { + plugin.reset(); + plugin.enableBlock(); + } - // Enable block so the second request would block - for (ScriptedBlockPlugin plugin : plugins) { - plugin.reset(); - plugin.enableBlock(); + scrollId = searchResponse.getScrollId(); + logger.info("Executing scroll with id {}", scrollId); + } finally { + searchResponse.decRef(); } - - String scrollId = searchResponse.getScrollId(); - logger.info("Executing scroll with id {}", scrollId); ActionFuture scrollResponse = client().prepareSearchScroll(searchResponse.getScrollId()) .setScroll(keepAlive) .execute(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index e18c37aff783b..d4a4debbd61d6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -147,9 +147,11 @@ public void testDfsQueryThenFetch() throws Exception { ); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -184,9 +186,11 @@ public void testDfsQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -214,9 +218,11 @@ public void testQueryThenFetch() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -227,26 +233,29 @@ public void testQueryThenFetchWithFrom() throws Exception { Set collectedIds = new TreeSet<>(); - SearchResponse searchResponse = client().search( - new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH) - ).actionGet(); - 
assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(60)); - for (int i = 0; i < 60; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - searchResponse = client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)) - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(40)); - for (int i = 0; i < 40; i++) { - SearchHit hit = searchResponse.getHits().getHits()[i]; - collectedIds.add(hit.getId()); - } - assertThat(collectedIds, equalTo(fullExpectedIds)); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(60)); + for (int i = 0; i < 60; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + } + ); + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(40)); + for (int i = 0; i < 40; i++) { + SearchHit hit = searchResponse.getHits().getHits()[i]; + collectedIds.add(hit.getId()); + } + assertThat(collectedIds, equalTo(fullExpectedIds)); + } + ); } public void testQueryThenFetchWithSort() throws Exception { @@ -272,9 +281,11 @@ public void testQueryThenFetchWithSort() throws Exception { assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += 
hits.length; + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get(); } clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); assertEquals(100, total); } @@ -327,23 +338,27 @@ public void testFailedSearchWithWrongFrom() throws Exception { logger.info("Start Testing failed search with wrong from"); SearchSourceBuilder source = searchSource().query(termQuery("multi", "test")).from(1000).size(20).explain(true); - SearchResponse response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertThat(response.getHits().getHits().length, equalTo(0)); - assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); - assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); - assertThat(response.getFailedShards(), equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); - - response = client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet(); - assertNoFailures(response); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse(client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), response -> { + assertThat(response.getHits().getHits().length, equalTo(0)); + assertThat(response.getTotalShards(), equalTo(test.numPrimaries)); + assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries)); + assertThat(response.getFailedShards(), equalTo(0)); + }); + + assertNoFailuresAndResponse( + client().search(new 
SearchRequest("test").searchType(QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); + + assertNoFailuresAndResponse( + client().search(new SearchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)), + response -> assertThat(response.getHits().getHits().length, equalTo(0)) + ); logger.info("Done Testing failed search"); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 379cdfc990207..d21619f4e6f89 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -372,7 +372,10 @@ public void testClusterDetailsAfterCCSWithFailuresOnRemoteClusterOnly() throws E boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); // dfs=true overrides the minimize_roundtrips=true setting and does not minimize roundtrips @@ -612,7 +615,10 @@ public void testRemoteClusterOnlyCCSWithFailuresOnAllShards() throws Exception { boolean minimizeRoundtrips = TransportSearchAction.shouldMinimizeRoundtrips(searchRequest); - client(LOCAL_CLUSTER).search(searchRequest, queryFuture); + client(LOCAL_CLUSTER).search(searchRequest, queryFuture.delegateFailure((l, r) -> { + r.incRef(); + l.onResponse(r); + })); assertBusy(() -> assertTrue(queryFuture.isDone())); if 
(skipUnavailable == false || minimizeRoundtrips == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 5c189c0c6c96a..ab72dbd4db707 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -378,8 +378,7 @@ public void testEnsureNoNegativeOffsets() throws Exception { assertNotHighlighted( prepareSearch().setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) - .get(), + .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")), 0, "no_long_term" ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index c608c253c851b..c67bdf82b5c2c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -803,7 +803,7 @@ public void testFromSize() throws Exception { request.setSize(4); request.addRescorer(new QueryRescorerBuilder(matchAllQuery()), 50); - assertEquals(4, request.get().getHits().getHits().length); + assertResponse(request, response -> assertEquals(4, response.getHits().getHits().length)); } public void testRescorePhaseWithInvalidSort() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 2d77e170abdc5..2d6bb8176b091 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -447,8 +447,13 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio } public void testEquivalence() { - - final int numDocs = (int) prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); + final int numDocs; + try { + numDocs = (int) response.getHits().getTotalHits().value; + } finally { + response.decRef(); + } int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 20b9ce38254c3..433f004acdd77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -232,13 +232,13 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { final String customPreference = randomAlphaOfLength(10); - final String nodeId = prepareSearch("test").setQuery(matchAllQuery()) - .setPreference(customPreference) - .get() - .getHits() - .getAt(0) - .getShard() - .getNodeId(); + final String nodeId; + var response = prepareSearch("test").setQuery(matchAllQuery()).setPreference(customPreference).get(); + try { + nodeId = response.getHits().getAt(0).getShard().getNodeId(); + } finally { + response.decRef(); + } assertSearchesSpecificNode("test", customPreference, nodeId); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 1362b0166a709..816fe48e5d97f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -65,7 +65,7 @@ public void testNodeSelection() { // Now after more searches, we should select a node with the lowest ARS rank. for (int i = 0; i < 5; i++) { - client.prepareSearch().setQuery(matchAllQuery()).get(); + client.prepareSearch().setQuery(matchAllQuery()).get().decRef(); } ClusterStateResponse clusterStateResponse = client.admin().cluster().prepareState().get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index e89e51a60fa23..036467b8d0774 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -30,6 +30,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -37,56 +38,61 @@ public class DuelScrollIT extends ESIntegTestCase { public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); - SearchResponse control = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - 
.setSize(context.numDocs) - .get(); - assertNoFailures(control); - SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(sh.getHits().length, equalTo(context.numDocs)); + assertNoFailuresAndResponse( + prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), + control -> { + SearchHits sh = control.getHits(); + assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getHits().length, equalTo(context.numDocs)); - SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) - .addSort(context.sort) - .setSize(context.scrollRequestSize) - .setScroll("10m") - .get(); + SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) + .addSort(context.sort) + .setSize(context.scrollRequestSize) + .setScroll("10m") + .get(); + try { - assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); - int counter = 0; - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } + int counter = 0; + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } - int iter = 1; - String scrollId = searchScrollResponse.getScrollId(); - while (true) { - searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); - assertNoFailures(searchScrollResponse); - 
assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); - if (searchScrollResponse.getHits().getHits().length == 0) { - break; - } + int iter = 1; + String scrollId = searchScrollResponse.getScrollId(); + while (true) { + searchScrollResponse.decRef(); + searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll("10m").get(); + assertNoFailures(searchScrollResponse); + assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + if (searchScrollResponse.getHits().getHits().length == 0) { + break; + } - int expectedLength; - int scrollSlice = ++iter * context.scrollRequestSize; - if (scrollSlice <= context.numDocs) { - expectedLength = context.scrollRequestSize; - } else { - expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); - } - assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); - for (SearchHit hit : searchScrollResponse.getHits()) { - assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); - } - scrollId = searchScrollResponse.getScrollId(); - } + int expectedLength; + int scrollSlice = ++iter * context.scrollRequestSize; + if (scrollSlice <= context.numDocs) { + expectedLength = context.scrollRequestSize; + } else { + expectedLength = context.scrollRequestSize - (scrollSlice - context.numDocs); + } + assertThat(searchScrollResponse.getHits().getHits().length, equalTo(expectedLength)); + for (SearchHit hit : searchScrollResponse.getHits()) { + assertThat(hit.getSortValues()[0], equalTo(sh.getAt(counter++).getSortValues()[0])); + } + scrollId = searchScrollResponse.getScrollId(); + } - assertThat(counter, equalTo(context.numDocs)); - clearScroll(scrollId); + assertThat(counter, equalTo(context.numDocs)); + clearScroll(scrollId); + } finally { + searchScrollResponse.decRef(); + } + } + ); } private TestContext create(SearchType... 
searchTypes) throws Exception { @@ -213,47 +219,51 @@ private int createIndex(boolean singleShard) throws Exception { private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int numDocs) throws Exception { final int size = scaledRandomIntBetween(5, numDocs + 5); - final SearchResponse control = prepareSearch("test").setSearchType(searchType) - .setSize(numDocs) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .get(); - assertNoFailures(control); + assertNoFailuresAndResponse( + prepareSearch("test").setSearchType(searchType) + .setSize(numDocs) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores), + control -> { - SearchResponse scroll = prepareSearch("test").setSearchType(searchType) - .setSize(size) - .setQuery(QueryBuilders.matchQuery("foo", "true")) - .addSort(SortBuilders.fieldSort("_doc")) - .setTrackScores(trackScores) - .setScroll("10m") - .get(); + SearchResponse scroll = prepareSearch("test").setSearchType(searchType) + .setSize(size) + .setQuery(QueryBuilders.matchQuery("foo", "true")) + .addSort(SortBuilders.fieldSort("_doc")) + .setTrackScores(trackScores) + .setScroll("10m") + .get(); - int scrollDocs = 0; - try { - while (true) { - assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); - assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); - if (scroll.getHits().getHits().length == 0) { - break; + int scrollDocs = 0; + try { + while (true) { + assertNoFailures(scroll); + assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); + if (scroll.getHits().getHits().length == 0) { + break; + } + for (int i = 0; i < scroll.getHits().getHits().length; ++i) { + SearchHit 
controlHit = control.getHits().getAt(scrollDocs + i); + SearchHit scrollHit = scroll.getHits().getAt(i); + assertEquals(controlHit.getId(), scrollHit.getId()); + } + scrollDocs += scroll.getHits().getHits().length; + scroll.decRef(); + scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); + } + assertEquals(control.getHits().getTotalHits().value, scrollDocs); + } catch (AssertionError e) { + logger.info("Control:\n{}", control); + logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); + throw e; + } finally { + clearScroll(scroll.getScrollId()); + scroll.decRef(); } - for (int i = 0; i < scroll.getHits().getHits().length; ++i) { - SearchHit controlHit = control.getHits().getAt(scrollDocs + i); - SearchHit scrollHit = scroll.getHits().getAt(i); - assertEquals(controlHit.getId(), scrollHit.getId()); - } - scrollDocs += scroll.getHits().getHits().length; - scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll("10m").get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); - } catch (AssertionError e) { - logger.info("Control:\n{}", control); - logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); - throw e; - } finally { - clearScroll(scroll.getScrollId()); - } + ); } public void testDuelIndexOrderQueryThenFetch() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index e8b3cfdb1768a..28723a09355a9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -47,8 +47,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -92,6 +94,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -100,6 +103,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -109,6 +113,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { } } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -146,6 +151,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } for (int i = 0; i < 32; i++) { + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -156,6 +162,7 @@ public void 
testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // and now, the last one is one + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -165,6 +172,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } // a the last is zero + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); @@ -175,6 +183,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -196,11 +205,11 @@ public void testScrollAndUpdateIndex() throws Exception { indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); + 
assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 0); SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("user:kimchy")) .setSize(35) @@ -214,23 +223,19 @@ public void testScrollAndUpdateIndex() throws Exception { map.put("message", "update"); prepareIndex("test").setId(searchHit.getId()).setSource(map).get(); } + searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); } while (searchResponse.getHits().getHits().length > 0); indicesAdmin().prepareRefresh().get(); - assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); - assertThat( - prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(500L) - ); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "test")), 0); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("message", "update")), 500); } finally { clearScroll(searchResponse.getScrollId()); + searchResponse.decRef(); } } @@ -246,12 +251,24 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + 
SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -259,36 +276,36 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = 
client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll() @@ -361,12 +378,24 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); + long counter1 = 0; + long counter2 = 0; + SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); + try { + 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); + } SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) @@ -374,36 +403,36 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - - long counter1 = 0; - long counter2 = 0; - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); - } - - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : 
searchResponse1.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + try { + assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse1.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); + } + } finally { + searchResponse1.decRef(); } - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse2.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); + try { + assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse2.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); + } + } finally { + searchResponse2.decRef(); } ClearScrollResponse clearResponse = client().prepareClearScroll().addScrollId("_all").get(); @@ -447,6 +476,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { if (scrollId != null) { clearScroll(scrollId); } + response.decRef(); } } } @@ -456,12 +486,16 @@ public void testThatNonExistingScrollIdReturnsCorrectException() throws Exceptio refresh(); SearchResponse searchResponse = prepareSearch("index").setSize(1).setScroll("1m").get(); - assertThat(searchResponse.getScrollId(), is(notNullValue())); + try { + assertThat(searchResponse.getScrollId(), is(notNullValue())); - ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); - assertThat(clearScrollResponse.isSucceeded(), is(true)); + ClearScrollResponse 
clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); + assertThat(clearScrollResponse.isSucceeded(), is(true)); - assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + assertRequestBuilderThrows(internalCluster().client().prepareSearchScroll(searchResponse.getScrollId()), RestStatus.NOT_FOUND); + } finally { + searchResponse.decRef(); + } } public void testStringSortMissingAscTerminates() throws Exception { @@ -471,30 +505,29 @@ public void testStringSortMissingAscTerminates() throws Exception { prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); - SearchResponse response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertNoFailures(response); - assertHitCount(response, 1); - assertNoSearchHits(response); - - response = prepareSearch("test") - - .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")) - .setScroll("1m") - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertResponse( + prepareSearch("test").addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertNoFailuresAndResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertNoSearchHits(response2); + }); + } + ); - response = client().prepareSearchScroll(response.getScrollId()).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getHits().length, equalTo(0)); + assertResponse( + prepareSearch("test").addSort(new 
FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")).setScroll("1m"), + response -> { + assertHitCount(response, 1); + assertSearchHits(response, "1"); + assertResponse(client().prepareSearchScroll(response.getScrollId()), response2 -> { + assertHitCount(response2, 1); + assertThat(response2.getHits().getHits().length, equalTo(0)); + }); + } + ); } public void testCloseAndReopenOrDeleteWithActiveScroll() { @@ -503,17 +536,17 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); - SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) - .setSize(35) - .setScroll(TimeValue.timeValueMinutes(2)) - .addSort("field", SortOrder.ASC) - .get(); - long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(35)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); - } + assertResponse( + prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), + searchResponse -> { + long counter = 0; + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + } + ); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose("test")); assertAcked(indicesAdmin().prepareOpen("test")); @@ -572,18 +605,18 @@ public void testInvalidScrollKeepAlive() throws IOException { assertNotNull(illegalArgumentException); assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (2h) is too large")); - SearchResponse searchResponse = 
prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)).get(); - assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - - exc = expectThrows( - Exception.class, - () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() - ); - illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap(exc, IllegalArgumentException.class); - assertNotNull(illegalArgumentException); - assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (3h) is too large")); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { + assertNotNull(searchResponse.getScrollId()); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + Exception ex = expectThrows( + Exception.class, + () -> client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueHours(3)).get() + ); + IllegalArgumentException iae = (IllegalArgumentException) ExceptionsHelper.unwrap(ex, IllegalArgumentException.class); + assertNotNull(iae); + assertThat(iae.getMessage(), containsString("Keep alive for request (3h) is too large")); + }); } /** @@ -614,13 +647,18 @@ public void testScrollRewrittenToMatchNoDocs() { assertNoFailures(resp); while (resp.getHits().getHits().length > 0) { totalHits += resp.getHits().getHits().length; - resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + final String scrollId = resp.getScrollId(); + resp.decRef(); + resp = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(1)).get(); assertNoFailures(resp); } assertThat(totalHits, equalTo(2)); } finally { - if (resp != 
null && resp.getScrollId() != null) { - client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + if (resp != null) { + if (resp.getScrollId() != null) { + client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + } + resp.decRef(); } } } @@ -635,26 +673,38 @@ public void testRestartDataNodesDuringScrollSearch() throws Exception { index("prod", "prod-" + i, Map.of()); } indicesAdmin().prepareRefresh().get(); + final String respFromDemoIndexScrollId; SearchResponse respFromDemoIndex = prepareSearch("demo").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); + try { + respFromDemoIndexScrollId = respFromDemoIndex.getScrollId(); + } finally { + respFromDemoIndex.decRef(); + } internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback()); ensureGreen("demo", "prod"); + final String respFromProdIndexScrollId; SearchResponse respFromProdIndex = prepareSearch("prod").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); - assertNoFailures(respFromProdIndex); + try { + assertNoFailures(respFromProdIndex); + respFromProdIndexScrollId = respFromProdIndex.getScrollId(); + } finally { + respFromProdIndex.decRef(); + } SearchPhaseExecutionException error = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearchScroll(respFromDemoIndex.getScrollId()).get() + () -> client().prepareSearchScroll(respFromDemoIndexScrollId).get() ); for (ShardSearchFailure shardSearchFailure : error.shardFailures()) { assertThat(shardSearchFailure.getCause().getMessage(), containsString("No search context found for id [1]")); } - client().prepareSearchScroll(respFromProdIndex.getScrollId()).get(); + client().prepareSearchScroll(respFromProdIndexScrollId).get().decRef(); } private void assertToXContentResponse(ClearScrollResponse response, boolean succeed, int numFreed) throws IOException { diff 
--git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 23a38c0608490..42be70e5ff8b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -62,30 +62,37 @@ public void testScanScrollWithShardExceptions() throws Exception { .setSize(10) .setScroll(TimeValue.timeValueMinutes(1)) .get(); - assertAllSuccessful(searchResponse); - long numHits = 0; - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + try { assertAllSuccessful(searchResponse); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, equalTo(100L)); - clearScroll("_all"); + long numHits = 0; + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertAllSuccessful(searchResponse); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, equalTo(100L)); + clearScroll("_all"); - internalCluster().stopRandomNonMasterNode(); + internalCluster().stopRandomNonMasterNode(); - searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); - numHits = 0; - int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); - do { - numHits += searchResponse.getHits().getHits().length; - searchResponse = 
client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); - assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); - } while (searchResponse.getHits().getHits().length > 0); - assertThat(numHits, greaterThan(0L)); + searchResponse.decRef(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); + numHits = 0; + int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); + do { + numHits += searchResponse.getHits().getHits().length; + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get(); + assertThat(searchResponse.getSuccessfulShards(), equalTo(numberOfSuccessfulShards)); + } while (searchResponse.getHits().getHits().length > 0); + assertThat(numHits, greaterThan(0L)); - clearScroll(searchResponse.getScrollId()); + clearScroll(searchResponse.getScrollId()); + } finally { + searchResponse.decRef(); + } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 6219c1b72253a..d76031d402af0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -50,6 +50,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayWithSize; @@ -157,15 +159,18 @@ public void testWithNullStrings() throws InterruptedException { prepareIndex("test").setId("0").setSource("field1", 0), prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); - SearchResponse searchResponse = prepareSearch("test").addSort("field1", SortOrder.ASC) - .addSort("field2", SortOrder.ASC) - .setQuery(matchAllQuery()) - .searchAfter(new Object[] { 0, null }) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); - assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + assertResponse( + prepareSearch("test").addSort("field1", SortOrder.ASC) + .addSort("field2", SortOrder.ASC) + .setQuery(matchAllQuery()) + .searchAfter(new Object[] { 0, null }), + searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); + assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); + } + ); } public void testWithSimpleTypes() throws Exception { @@ -229,31 +234,36 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { .add(new IndexRequest("test").id("5").source("start_date", "2017-01-20", "end_date", "2025-05-28")) .get(); - SearchResponse resp = 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); - - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); + } + ); - resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) - .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" - .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) - .setSize(2) - .get(); - assertNoFailures(resp); - assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); - assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + assertNoFailuresAndResponse( + 
prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); + } + ); + assertNoFailuresAndResponse( + prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" + .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) + .setSize(2), + resp -> { + assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); + assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); + } + ); SearchRequestBuilder searchRequest = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("epoch_millis")) @@ -332,11 +342,15 @@ private void assertSearchFromWithSortValues(String indexName, List> req.searchAfter(sortValues); } SearchResponse searchResponse = req.get(); - for (SearchHit hit : searchResponse.getHits()) { - List toCompare = convertSortValues(documents.get(offset++)); - assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + try { + for (SearchHit hit : searchResponse.getHits()) { + List toCompare = convertSortValues(documents.get(offset++)); + assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.getSortValues())), equalTo(0)); + } + sortValues = searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); + } finally { + searchResponse.decRef(); } - sortValues = 
searchResponse.getHits().getHits()[searchResponse.getHits().getHits().length - 1].getSortValues(); } } @@ -445,11 +459,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertThat(((Number) timestamp).longValue(), equalTo(timestamps.get(foundHits))); foundHits++; } + resp.decRef(); resp = client().prepareSearchScroll(resp.getScrollId()).setScroll(TimeValue.timeValueMinutes(5)).get(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().prepareClearScroll().addScrollId(resp.getScrollId()).get(); + resp.decRef(); } } // search_after with sort with point in time @@ -479,11 +495,13 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("Sorted by timestamp and pit tier breaker", after, arrayWithSize(2)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); assertThat(foundHits, equalTo(timestamps.size())); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } @@ -512,12 +530,14 @@ public void testScrollAndSearchAfterWithBigIndex() { assertNotNull(after); assertThat("sorted by pit tie breaker", after, arrayWithSize(1)); searchRequest.source().searchAfter(after); + resp.decRef(); resp = client().search(searchRequest).actionGet(); } while (resp.getHits().getHits().length > 0); Collections.sort(foundSeqNos); assertThat(foundSeqNos, equalTo(timestamps)); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitID)).actionGet(); + resp.decRef(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 527d8bed8bc68..93340bedbdae3 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -39,6 +39,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -111,8 +112,8 @@ public void testWithPreferenceAndRoutings() throws Exception { int numShards = 10; int totalDocs = randomIntBetween(100, 1000); setupIndex(totalDocs, numShards); - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0).get(); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -122,9 +123,9 @@ public void testWithPreferenceAndRoutings() throws Exception { .setPreference("_shards:1,4") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0).get(); + }); + + assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -134,15 +135,15 @@ public void testWithPreferenceAndRoutings() throws Exception { .setRouting("foo", "bar") .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } - { - 
assertAcked( - indicesAdmin().prepareAliases() - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) - .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) - ); - SearchResponse sr = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0).get(); + }); + + assertAcked( + indicesAdmin().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) + ); + assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); @@ -151,7 +152,7 @@ public void testWithPreferenceAndRoutings() throws Exception { .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, "_id", max, numDocs); - } + }); } private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String field, int numSlice, int numDocs) { @@ -160,27 +161,32 @@ private void assertSearchSlicesWithScroll(SearchRequestBuilder request, String f for (int id = 0; id < numSlice; id++) { SliceBuilder sliceBuilder = new SliceBuilder(field, id, numSlice); SearchResponse searchResponse = request.slice(sliceBuilder).get(); - totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - int numSliceResults = searchResponse.getHits().getHits().length; - String scrollId = searchResponse.getScrollId(); - for (SearchHit hit : 
searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } - while (searchResponse.getHits().getHits().length > 0) { - searchResponse = client().prepareSearchScroll("test") - .setScrollId(scrollId) - .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) - .get(); - scrollId = searchResponse.getScrollId(); + try { totalResults += searchResponse.getHits().getHits().length; - numSliceResults += searchResponse.getHits().getHits().length; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int numSliceResults = searchResponse.getHits().getHits().length; + String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { assertTrue(keys.add(hit.getId())); } + while (searchResponse.getHits().getHits().length > 0) { + searchResponse.decRef(); + searchResponse = client().prepareSearchScroll("test") + .setScrollId(scrollId) + .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) + .get(); + scrollId = searchResponse.getScrollId(); + totalResults += searchResponse.getHits().getHits().length; + numSliceResults += searchResponse.getHits().getHits().length; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + clearScroll(scrollId); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); - clearScroll(scrollId); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); @@ -222,24 +228,29 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie .setSize(randomIntBetween(10, 100)); SearchResponse searchResponse = request.get(); - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + try { + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; - while (true) { - int numHits = 
searchResponse.getHits().getHits().length; - if (numHits == 0) { - break; - } + while (true) { + int numHits = searchResponse.getHits().getHits().length; + if (numHits == 0) { + break; + } - totalResults += numHits; - numSliceResults += numHits; - for (SearchHit hit : searchResponse.getHits().getHits()) { - assertTrue(keys.add(hit.getId())); - } + totalResults += numHits; + numSliceResults += numHits; + for (SearchHit hit : searchResponse.getHits().getHits()) { + assertTrue(keys.add(hit.getId())); + } - Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); - searchResponse = request.searchAfter(sortValues).get(); + Object[] sortValues = searchResponse.getHits().getHits()[numHits - 1].getSortValues(); + searchResponse.decRef(); + searchResponse = request.searchAfter(sortValues).get(); + } + assertThat(numSliceResults, equalTo(expectedSliceResults)); + } finally { + searchResponse.decRef(); } - assertThat(numSliceResults, equalTo(expectedSliceResults)); } assertThat(totalResults, equalTo(numDocs)); assertThat(keys.size(), equalTo(numDocs)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 2967bdc454aed..4a10bf6cf8fab 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -86,8 +86,8 @@ public void testWithRouting() { assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - - response = prepareSearch("test").storedFields("_none_").get(); + }); + assertResponse(prepareSearch("test").storedFields("_none_"), response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); 
assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java index 0d850a3708044..160cba19700ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java @@ -119,7 +119,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword")) .setSize(0) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after second query: {}", stats); @@ -148,7 +149,8 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio .setQuery(QueryBuilders.rangeQuery("date_field").from("2016/01/01")) .setSize(100) .setPreference("fixed") - .get(); + .get() + .decRef(); stats = aggregated(client().execute(FieldUsageStatsAction.INSTANCE, new FieldUsageStatsRequest()).get().getStats().get("test")); logger.info("Stats after third query: {}", stats); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index b5f7468d1645c..9ca565cef7843 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -58,6 +58,7 @@ import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; @@ -945,10 +946,11 @@ public void testThatStatsAreWorking() throws Exception { ensureGreen(); // load the fst index into ram prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f"))) - .get(); + .get() + .decRef(); prepareSearch(INDEX).suggest( new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")) - ).get(); + ).get().decRef(); // Get all stats IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -1278,7 +1280,7 @@ public void testPrunedSegments() throws IOException { refresh(); assertSuggestions("b"); - assertThat(2L, equalTo(prepareSearch(INDEX).setSize(0).get().getHits().getTotalHits().value)); + assertHitCount(prepareSearch(INDEX).setSize(0), 2); for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get(INDEX)) { ShardSegments[] shards = seg.shards(); for (ShardSegments shardSegments : shards) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 42c19a903b452..b04aa321f70f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -55,6 +55,7 @@ import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -953,7 +954,7 @@ public void testQueuedSnapshotsWaitingForShardReady() throws Exception { indexDoc(testIndex, Integer.toString(i), "foo", "bar" + i); } refresh(); - assertThat(prepareSearch(testIndex).setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertHitCount(prepareSearch(testIndex).setSize(0), 100); logger.info("--> start relocations"); allowNodes(testIndex, 1); diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 570d583335a12..0b5b953df84fc 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -502,9 +502,14 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce } protected long getCountForIndex(String indexName) { - return client().search( + var resp = client().search( new SearchRequest(new SearchRequest(indexName).source(new SearchSourceBuilder().size(0).trackTotalHits(true))) - ).actionGet().getHits().getTotalHits().value; + ).actionGet(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } } protected void assertDocCount(String index, long count) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 2f6286092b535..e0083d5570baa 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1064,18 +1064,17 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr if (lastKnownCount >= numDocs) { try { - long count = prepareSearch().setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get() - .getHits() - .getTotalHits().value; - - if (count == lastKnownCount) { - // no progress - try to refresh for the next time - indicesAdmin().prepareRefresh().get(); + var resp = prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()).get(); + try { + long count = resp.getHits().getTotalHits().value; + if (count == lastKnownCount) { + // no progress - try to refresh for the next time + indicesAdmin().prepareRefresh().get(); + } + lastKnownCount = count; + } finally { + resp.decRef(); } - lastKnownCount = count; } catch (Exception e) { // count now acts like search and barfs if all shards failed... logger.debug("failed to executed count", e); throw e; From f99b4459d7dd3b46cb6e75116a054669f18d4404 Mon Sep 17 00:00:00 2001 From: Riahiamirreza <54557628+Riahiamirreza@users.noreply.github.com> Date: Tue, 5 Dec 2023 00:14:12 +0330 Subject: [PATCH 190/263] Remove redundant character in mlt-query.asciidoc (#102945) --- docs/reference/query-dsl/mlt-query.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index 8550a34efaa4d..c470beaa9e41d 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -107,7 +107,7 @@ analyzes it, usually using the same analyzer at the field, then selects the top K terms with highest tf-idf to form a disjunctive query of these terms. IMPORTANT: The fields on which to perform MLT must be indexed and of type -`text` or `keyword``. 
Additionally, when using `like` with documents, either +`text` or `keyword`. Additionally, when using `like` with documents, either `_source` must be enabled or the fields must be `stored` or store `term_vector`. In order to speed up analysis, it could help to store term vectors at index time. From eee5f98550ae4c815e1b3549b11df8bab2a861fa Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 4 Dec 2023 21:48:51 +0100 Subject: [PATCH 191/263] [Connector API] Implement configuration internal representation + update endpoint (#102927) --- .../api/connector.update_configuration.json | 39 ++ .../335_connector_update_configuration.yml | 183 ++++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 32 +- .../connector/ConnectorConfiguration.java | 442 ++++++++++++++++++ .../connector/ConnectorIndexService.java | 39 ++ ...estUpdateConnectorConfigurationAction.java | 45 ++ ...ortUpdateConnectorConfigurationAction.java | 55 +++ .../UpdateConnectorConfigurationAction.java | 202 ++++++++ .../ConfigurationDependency.java | 129 +++++ .../ConfigurationDisplayType.java | 32 ++ .../configuration/ConfigurationFieldType.java | 35 ++ .../ConfigurationSelectOption.java | 106 +++++ .../ConfigurationValidation.java | 131 ++++++ .../ConfigurationValidationType.java | 33 ++ .../connector/syncjob/ConnectorSyncJob.java | 3 +- .../syncjob/ConnectorSyncJobIndexService.java | 5 +- .../ConnectorConfigurationTests.java | 97 ++++ .../connector/ConnectorIndexServiceTests.java | 51 ++ .../connector/ConnectorTestUtils.java | 65 ++- .../application/connector/ConnectorTests.java | 276 ++++++----- ...ationActionRequestBWCSerializingTests.java | 52 +++ ...tionActionResponseBWCSerializingTests.java | 43 ++ .../xpack/security/operator/Constants.java | 1 + 24 files changed, 1957 insertions(+), 144 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json create mode 100644 
x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java create mode 100644 
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json new file mode 100644 index 0000000000000..347418940b4c9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -0,0 +1,39 @@ +{ + "connector.update_configuration": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the connector configuration." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_configuration", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "Mapping between field names to configuration.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml new file mode 100644 index 0000000000000..260e1784d29e2 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -0,0 +1,183 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Configuration": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.value: 123 } + - match: { configuration.some_field.sensitive: false } + - match: { configuration.some_field.display: numeric } + - match: { status: configured } + + + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. 
+ type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 456 + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.value: 456 } + - match: { status: configured } + +--- +"Update Connector Configuration - Connector doesn't exist": + - do: + catch: "missing" + connector.update_configuration: + connector_id: test-non-existent-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + +--- +"Update Connector Configuration - Required fields are missing": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + +--- +"Update Connector Configuration - Unknown field type": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: some_display_type + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. 
+ type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + +--- +"Update Connector Configuration - Unknown constraint": + - do: + catch: "bad_request" + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: unknown_constraint + value: 123 diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index f93177666f3d8..1a8ae73c41935 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -50,6 +50,7 @@ import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; @@ -60,12 +61,14 @@ import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; import 
org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -207,6 +210,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), + new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), @@ -275,6 +279,7 @@ public List getRestHandlers( new 
RestGetConnectorAction(), new RestListConnectorAction(), new RestPutConnectorAction(), + new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), new RestUpdateConnectorFilteringAction(), new RestUpdateConnectorLastSeenAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index d68cc9f7227bc..73d066f64d197 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -70,7 +70,7 @@ public class Connector implements NamedWriteable, ToXContentObject { @Nullable private final String apiKeyId; @Nullable - private final Map configuration; // TODO: add explicit types + private final Map configuration; @Nullable private final Map customScheduling; @Nullable @@ -131,7 +131,7 @@ public class Connector implements NamedWriteable, ToXContentObject { private Connector( String connectorId, String apiKeyId, - Map configuration, + Map configuration, Map customScheduling, String description, String error, @@ -175,7 +175,7 @@ private Connector( public Connector(StreamInput in) throws IOException { this.connectorId = in.readString(); this.apiKeyId = in.readOptionalString(); - this.configuration = in.readMap(StreamInput::readGenericValue); + this.configuration = in.readMap(ConnectorConfiguration::new); this.customScheduling = in.readMap(ConnectorCustomSchedule::new); this.description = in.readOptionalString(); this.error = in.readOptionalString(); @@ -220,7 +220,7 @@ public Connector(StreamInput in) throws IOException { int i = 0; return new Builder().setConnectorId((String) args[i++]) .setApiKeyId((String) args[i++]) - .setConfiguration((Map) args[i++]) + .setConfiguration((Map) args[i++]) .setCustomScheduling((Map) args[i++]) 
.setDescription((String) args[i++]) .setError((String) args[i++]) @@ -258,7 +258,7 @@ public Connector(StreamInput in) throws IOException { PARSER.declareString(optionalConstructorArg(), API_KEY_ID_FIELD); PARSER.declareField( optionalConstructorArg(), - (parser, context) -> parser.map(), + (p, c) -> p.map(HashMap::new, ConnectorConfiguration::fromXContent), CONFIGURATION_FIELD, ObjectParser.ValueType.OBJECT ); @@ -378,10 +378,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); } if (configuration != null) { - builder.field(CONFIGURATION_FIELD.getPreferredName(), configuration); + builder.xContentValuesMap(CONFIGURATION_FIELD.getPreferredName(), configuration); } if (customScheduling != null) { - builder.field(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); + builder.xContentValuesMap(CUSTOM_SCHEDULING_FIELD.getPreferredName(), customScheduling); } if (description != null) { builder.field(DESCRIPTION_FIELD.getPreferredName(), description); @@ -433,7 +433,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public void writeTo(StreamOutput out) throws IOException { out.writeString(connectorId); out.writeOptionalString(apiKeyId); - out.writeMap(configuration, StreamOutput::writeGenericValue); + out.writeMap(configuration, StreamOutput::writeWriteable); out.writeMap(customScheduling, StreamOutput::writeWriteable); out.writeOptionalString(description); out.writeOptionalString(error); @@ -461,10 +461,6 @@ public String getApiKeyId() { return apiKeyId; } - public Map getConfiguration() { - return configuration; - } - public Map getCustomScheduling() { return customScheduling; } @@ -513,8 +509,8 @@ public String getServiceType() { return serviceType; } - public ConnectorStatus getStatus() { - return status; + public Map getConfiguration() { + return configuration; } public Object getSyncCursor() { @@ -533,6 +529,10 @@ public 
Instant getLastSeen() { return lastSeen; } + public ConnectorStatus getStatus() { + return status; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -595,7 +595,7 @@ public static class Builder { private String connectorId; private String apiKeyId; - private Map configuration = Collections.emptyMap(); + private Map configuration = Collections.emptyMap(); private Map customScheduling = Collections.emptyMap(); private String description; private String error; @@ -625,7 +625,7 @@ public Builder setApiKeyId(String apiKeyId) { return this; } - public Builder setConfiguration(Map configuration) { + public Builder setConfiguration(Map configuration) { this.configuration = configuration; return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java new file mode 100644 index 0000000000000..103c647f180b4 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java @@ -0,0 +1,442 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationFieldType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationSelectOption; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidation; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Represents the configuration field settings for a connector. 
+ */ +public class ConnectorConfiguration implements Writeable, ToXContentObject { + + @Nullable + private final String category; + private final Object defaultValue; + private final List dependsOn; + private final ConfigurationDisplayType display; + private final String label; + private final List options; + @Nullable + private final Integer order; + @Nullable + private final String placeholder; + private final boolean required; + private final boolean sensitive; + private final String tooltip; + private final ConfigurationFieldType type; + private final List uiRestrictions; + private final List validations; + private final Object value; + + /** + * Constructs a new {@link ConnectorConfiguration} instance with specified properties. + * + * @param category The category of the configuration field. + * @param defaultValue The default value for the configuration. + * @param dependsOn A list of {@link ConfigurationDependency} indicating dependencies on other configurations. + * @param display The display type, defined by {@link ConfigurationDisplayType}. + * @param label The display label associated with the config field. + * @param options A list of {@link ConfigurationSelectOption} for selectable options. + * @param order The order in which this configuration appears. + * @param placeholder A placeholder text for the configuration field. + * @param required A boolean indicating whether the configuration is required. + * @param sensitive A boolean indicating whether the configuration contains sensitive information. + * @param tooltip A tooltip text providing additional information about the configuration. + * @param type The type of the configuration field, defined by {@link ConfigurationFieldType}. + * @param uiRestrictions A list of UI restrictions in string format. + * @param validations A list of {@link ConfigurationValidation} for validating the configuration. + * @param value The current value of the configuration. 
+ */ + private ConnectorConfiguration( + String category, + Object defaultValue, + List dependsOn, + ConfigurationDisplayType display, + String label, + List options, + Integer order, + String placeholder, + boolean required, + boolean sensitive, + String tooltip, + ConfigurationFieldType type, + List uiRestrictions, + List validations, + Object value + ) { + this.category = category; + this.defaultValue = defaultValue; + this.dependsOn = dependsOn; + this.display = display; + this.label = label; + this.options = options; + this.order = order; + this.placeholder = placeholder; + this.required = required; + this.sensitive = sensitive; + this.tooltip = tooltip; + this.type = type; + this.uiRestrictions = uiRestrictions; + this.validations = validations; + this.value = value; + } + + public ConnectorConfiguration(StreamInput in) throws IOException { + this.category = in.readString(); + this.defaultValue = in.readGenericValue(); + this.dependsOn = in.readOptionalCollectionAsList(ConfigurationDependency::new); + this.display = in.readEnum(ConfigurationDisplayType.class); + this.label = in.readString(); + this.options = in.readOptionalCollectionAsList(ConfigurationSelectOption::new); + this.order = in.readOptionalInt(); + this.placeholder = in.readOptionalString(); + this.required = in.readBoolean(); + this.sensitive = in.readBoolean(); + this.tooltip = in.readOptionalString(); + this.type = in.readEnum(ConfigurationFieldType.class); + this.uiRestrictions = in.readOptionalStringCollectionAsList(); + this.validations = in.readOptionalCollectionAsList(ConfigurationValidation::new); + this.value = in.readGenericValue(); + } + + static final ParseField CATEGORY_FIELD = new ParseField("category"); + static final ParseField DEFAULT_VALUE_FIELD = new ParseField("default_value"); + static final ParseField DEPENDS_ON_FIELD = new ParseField("depends_on"); + static final ParseField DISPLAY_FIELD = new ParseField("display"); + static final ParseField LABEL_FIELD = new 
ParseField("label"); + static final ParseField OPTIONS_FIELD = new ParseField("options"); + static final ParseField ORDER_FIELD = new ParseField("order"); + static final ParseField PLACEHOLDER_FIELD = new ParseField("placeholder"); + static final ParseField REQUIRED_FIELD = new ParseField("required"); + static final ParseField SENSITIVE_FIELD = new ParseField("sensitive"); + static final ParseField TOOLTIP_FIELD = new ParseField("tooltip"); + static final ParseField TYPE_FIELD = new ParseField("type"); + static final ParseField UI_RESTRICTIONS_FIELD = new ParseField("ui_restrictions"); + static final ParseField VALIDATIONS_FIELD = new ParseField("validations"); + static final ParseField VALUE_FIELD = new ParseField("value"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_dependency", + true, + args -> { + int i = 0; + return new ConnectorConfiguration.Builder().setCategory((String) args[i++]) + .setDefaultValue(args[i++]) + .setDependsOn((List) args[i++]) + .setDisplay((ConfigurationDisplayType) args[i++]) + .setLabel((String) args[i++]) + .setOptions((List) args[i++]) + .setOrder((Integer) args[i++]) + .setPlaceholder((String) args[i++]) + .setRequired((boolean) args[i++]) + .setSensitive((boolean) args[i++]) + .setTooltip((String) args[i++]) + .setType((ConfigurationFieldType) args[i++]) + .setUiRestrictions((List) args[i++]) + .setValidations((List) args[i++]) + .setValue(args[i]) + .build(); + } + ); + + static { + PARSER.declareString(optionalConstructorArg(), CATEGORY_FIELD); + PARSER.declareField(optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == 
XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, DEFAULT_VALUE_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationDependency.fromXContent(p), DEPENDS_ON_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationDisplayType.displayType(p.text()), + DISPLAY_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareString(constructorArg(), LABEL_FIELD); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationSelectOption.fromXContent(p), OPTIONS_FIELD); + PARSER.declareInt(optionalConstructorArg(), ORDER_FIELD); + PARSER.declareString(optionalConstructorArg(), PLACEHOLDER_FIELD); + PARSER.declareBoolean(constructorArg(), REQUIRED_FIELD); + PARSER.declareBoolean(constructorArg(), SENSITIVE_FIELD); + PARSER.declareStringOrNull(constructorArg(), TOOLTIP_FIELD); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationFieldType.fieldType(p.text()), + TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + PARSER.declareStringArray(constructorArg(), UI_RESTRICTIONS_FIELD); + PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationValidation.fromXContent(p), VALIDATIONS_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (category != 
null) { + builder.field(CATEGORY_FIELD.getPreferredName(), category); + } + builder.field(DEFAULT_VALUE_FIELD.getPreferredName(), defaultValue); + builder.xContentList(DEPENDS_ON_FIELD.getPreferredName(), dependsOn); + builder.field(DISPLAY_FIELD.getPreferredName(), display.toString()); + builder.field(LABEL_FIELD.getPreferredName(), label); + builder.xContentList(OPTIONS_FIELD.getPreferredName(), options); + if (order != null) { + builder.field(ORDER_FIELD.getPreferredName(), order); + } + if (placeholder != null) { + builder.field(PLACEHOLDER_FIELD.getPreferredName(), placeholder); + } + builder.field(REQUIRED_FIELD.getPreferredName(), required); + builder.field(SENSITIVE_FIELD.getPreferredName(), sensitive); + builder.field(TOOLTIP_FIELD.getPreferredName(), tooltip); + builder.field(TYPE_FIELD.getPreferredName(), type.toString()); + builder.stringListField(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); + builder.xContentList(VALIDATIONS_FIELD.getPreferredName(), validations); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConnectorConfiguration fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public static ConnectorConfiguration fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return ConnectorConfiguration.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse a connector configuration field.", e); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(category); + out.writeGenericValue(defaultValue); + out.writeOptionalCollection(dependsOn); + out.writeEnum(display); + out.writeString(label); + out.writeOptionalCollection(options); + out.writeOptionalInt(order); + 
out.writeOptionalString(placeholder); + out.writeBoolean(required); + out.writeBoolean(sensitive); + out.writeOptionalString(tooltip); + out.writeEnum(type); + out.writeOptionalStringCollection(uiRestrictions); + out.writeOptionalCollection(validations); + out.writeGenericValue(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConnectorConfiguration that = (ConnectorConfiguration) o; + return required == that.required + && sensitive == that.sensitive + && Objects.equals(category, that.category) + && Objects.equals(defaultValue, that.defaultValue) + && Objects.equals(dependsOn, that.dependsOn) + && display == that.display + && Objects.equals(label, that.label) + && Objects.equals(options, that.options) + && Objects.equals(order, that.order) + && Objects.equals(placeholder, that.placeholder) + && Objects.equals(tooltip, that.tooltip) + && type == that.type + && Objects.equals(uiRestrictions, that.uiRestrictions) + && Objects.equals(validations, that.validations) + && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash( + category, + defaultValue, + dependsOn, + display, + label, + options, + order, + placeholder, + required, + sensitive, + tooltip, + type, + uiRestrictions, + validations, + value + ); + } + + public static class Builder { + + private String category; + private Object defaultValue; + private List dependsOn; + private ConfigurationDisplayType display; + private String label; + private List options; + private Integer order; + private String placeholder; + private boolean required; + private boolean sensitive; + private String tooltip; + private ConfigurationFieldType type; + private List uiRestrictions; + private List validations; + private Object value; + + public Builder setCategory(String category) { + this.category = category; + return this; + } + + public Builder setDefaultValue(Object 
defaultValue) { + this.defaultValue = defaultValue; + return this; + } + + public Builder setDependsOn(List dependsOn) { + this.dependsOn = dependsOn; + return this; + } + + public Builder setDisplay(ConfigurationDisplayType display) { + this.display = display; + return this; + } + + public Builder setLabel(String label) { + this.label = label; + return this; + } + + public Builder setOptions(List options) { + this.options = options; + return this; + } + + public Builder setOrder(Integer order) { + this.order = order; + return this; + } + + public Builder setPlaceholder(String placeholder) { + this.placeholder = placeholder; + return this; + } + + public Builder setRequired(boolean required) { + this.required = required; + return this; + } + + public Builder setSensitive(boolean sensitive) { + this.sensitive = sensitive; + return this; + } + + public Builder setTooltip(String tooltip) { + this.tooltip = tooltip; + return this; + } + + public Builder setType(ConfigurationFieldType type) { + this.type = type; + return this; + } + + public Builder setUiRestrictions(List uiRestrictions) { + this.uiRestrictions = uiRestrictions; + return this; + } + + public Builder setValidations(List validations) { + this.validations = validations; + return this; + } + + public Builder setValue(Object value) { + this.value = value; + return this; + } + + public ConnectorConfiguration build() { + return new ConnectorConfiguration( + category, + defaultValue, + dependsOn, + display, + label, + options, + order, + placeholder, + required, + sensitive, + tooltip, + type, + uiRestrictions, + validations, + value + ); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 744a4d2028990..624697edfcd85 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -41,6 +42,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.function.BiConsumer; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -174,6 +176,43 @@ public void onFailure(Exception e) { } } + /** + * Updates the {@link ConnectorConfiguration} property of a {@link Connector}. + * + * @param request Request for updating connector configuration property. + * @param listener Listener to respond to a successful response or an error. 
+ */ + public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source( + Map.of( + Connector.CONFIGURATION_FIELD.getPreferredName(), + request.getConfiguration(), + Connector.STATUS_FIELD.getPreferredName(), + ConnectorStatus.CONFIGURED.toString() + ) + ) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Updates the {@link ConnectorFiltering} property of a {@link Connector}. * diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..aa46353d47999 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorConfigurationAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_configuration_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_configuration")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorConfigurationAction.Request request = UpdateConnectorConfigurationAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorConfigurationAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorConfigurationAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..211c3b5a3a670 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorConfigurationAction extends HandledTransportAction< + UpdateConnectorConfigurationAction.Request, + UpdateConnectorConfigurationAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorConfigurationAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorConfigurationAction.NAME, + transportService, + actionFilters, + UpdateConnectorConfigurationAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorConfigurationAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorConfiguration( + request, + listener.map(r -> new UpdateConnectorConfigurationAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java new file mode 100644 index 0000000000000..6b5f52f3afda7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java @@ -0,0 +1,202 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import static 
org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorConfigurationAction extends ActionType { + + public static final UpdateConnectorConfigurationAction INSTANCE = new UpdateConnectorConfigurationAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_configuration"; + + public UpdateConnectorConfigurationAction() { + super(NAME, UpdateConnectorConfigurationAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + private final Map configuration; + + public Request(String connectorId, Map configuration) { + this.connectorId = connectorId; + this.configuration = configuration; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.configuration = in.readMap(ConnectorConfiguration::new); + } + + public String getConnectorId() { + return connectorId; + } + + public Map getConfiguration() { + return configuration; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + + if (Objects.isNull(configuration)) { + validationException = addValidationError("[configuration] cannot be null.", validationException); + } + + return validationException; + } + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "connector_update_configuration_request", + false, + ((args, connectorId) -> new UpdateConnectorConfigurationAction.Request( + connectorId, + (Map) args[0] + )) + ); + + static { + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> 
p.map(HashMap::new, ConnectorConfiguration::fromXContent), + Connector.CONFIGURATION_FIELD, + ObjectParser.ValueType.OBJECT + ); + } + + public static UpdateConnectorConfigurationAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorConfigurationAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse connector configuration.", e); + } + } + + public static UpdateConnectorConfigurationAction.Request fromXContent(XContentParser parser, String connectorId) + throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.CONFIGURATION_FIELD.getPreferredName(), configuration); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeMap(configuration, StreamOutput::writeWriteable); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) && Objects.equals(configuration, request.configuration); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, configuration); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + 
} + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java new file mode 100644 index 0000000000000..1efd3f47fdff0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDependency.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents a dependency within a connector configuration, defining a specific field and its associated value. + * This class is used to encapsulate configuration dependencies in a structured format. + */ +public class ConfigurationDependency implements Writeable, ToXContentObject { + + private final String field; + private final Object value; + + /** + * Constructs a new instance of ConfigurationDependency. + * + * @param field The name of the field in the configuration dependency. + * @param value The value associated with the field. 
+ */ + public ConfigurationDependency(String field, Object value) { + this.field = field; + this.value = value; + } + + public ConfigurationDependency(StreamInput in) throws IOException { + this.field = in.readString(); + this.value = in.readGenericValue(); + } + + private static final ParseField FIELD_FIELD = new ParseField("field"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_dependency", + true, + args -> new ConfigurationDependency.Builder().setField((String) args[0]).setValue(args[1]).build() + ); + + static { + PARSER.declareString(constructorArg(), FIELD_FIELD); + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + return p.booleanValue(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(FIELD_FIELD.getPreferredName(), field); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConfigurationDependency fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeGenericValue(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + 
ConfigurationDependency that = (ConfigurationDependency) o; + return Objects.equals(field, that.field) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(field, value); + } + + public static class Builder { + + private String field; + private Object value; + + public Builder setField(String field) { + this.field = field; + return this; + } + + public Builder setValue(Object value) { + this.value = value; + return this; + } + + public ConfigurationDependency build() { + return new ConfigurationDependency(field, value); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java new file mode 100644 index 0000000000000..d6b3d83d705b9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import java.util.Locale; + +public enum ConfigurationDisplayType { + TEXTBOX, + TEXTAREA, + NUMERIC, + TOGGLE, + DROPDOWN; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static ConfigurationDisplayType displayType(String type) { + for (ConfigurationDisplayType displayType : ConfigurationDisplayType.values()) { + if (displayType.name().equalsIgnoreCase(type)) { + return displayType; + } + } + throw new IllegalArgumentException("Unknown DisplayType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java new file mode 100644 index 0000000000000..20162735985c6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationFieldType.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +public enum ConfigurationFieldType { + STRING("str"), + INTEGER("int"), + LIST("list"), + BOOLEAN("bool"); + + private final String value; + + ConfigurationFieldType(String value) { + this.value = value; + } + + @Override + public String toString() { + return this.value; + } + + public static ConfigurationFieldType fieldType(String type) { + for (ConfigurationFieldType fieldType : ConfigurationFieldType.values()) { + if (fieldType.value.equals(type)) { + return fieldType; + } + } + throw new IllegalArgumentException("Unknown FieldType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java new file mode 100644 index 0000000000000..ba281c69702e0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationSelectOption.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class ConfigurationSelectOption implements Writeable, ToXContentObject { + private final String label; + private final String value; + + private ConfigurationSelectOption(String label, String value) { + this.label = label; + this.value = value; + } + + public ConfigurationSelectOption(StreamInput in) throws IOException { + this.label = in.readString(); + this.value = in.readString(); + } + + private static final ParseField LABEL_FIELD = new ParseField("label"); + private static final ParseField VALUE_FIELD = new ParseField("value"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_select_option", + true, + args -> new ConfigurationSelectOption.Builder().setLabel((String) args[0]).setValue((String) args[1]).build() + ); + + static { + PARSER.declareString(constructorArg(), LABEL_FIELD); + PARSER.declareString(constructorArg(), VALUE_FIELD); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(LABEL_FIELD.getPreferredName(), label); + builder.field(VALUE_FIELD.getPreferredName(), value); + } + builder.endObject(); + return builder; + } + + public static ConfigurationSelectOption fromXContent(XContentParser parser) throws IOException { + return 
PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + out.writeString(value); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ConfigurationSelectOption that = (ConfigurationSelectOption) o; + return Objects.equals(label, that.label) && Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(label, value); + } + + public static class Builder { + + private String label; + private String value; + + public Builder setLabel(String label) { + this.label = label; + return this; + } + + public Builder setValue(String value) { + this.value = value; + return this; + } + + public ConfigurationSelectOption build() { + return new ConfigurationSelectOption(label, value); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java new file mode 100644 index 0000000000000..476ae113398dc --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidation.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +/** + * Represents a configuration validation entity, encapsulating a validation constraint and its corresponding type. + * This class is used to define and handle specific validation rules or requirements within a configuration context. + */ +public class ConfigurationValidation implements Writeable, ToXContentObject { + + private final Object constraint; + private final ConfigurationValidationType type; + + /** + * Constructs a new ConfigurationValidation instance with specified constraint and type. + * This constructor initializes the object with a given validation constraint and its associated validation type. + * + * @param constraint The validation constraint, represented as an Object. + * @param type The type of configuration validation, specified as an instance of {@link ConfigurationValidationType}. 
+ */ + private ConfigurationValidation(Object constraint, ConfigurationValidationType type) { + this.constraint = constraint; + this.type = type; + } + + public ConfigurationValidation(StreamInput in) throws IOException { + this.constraint = in.readGenericValue(); + this.type = in.readEnum(ConfigurationValidationType.class); + } + + private static final ParseField CONSTRAINT_FIELD = new ParseField("constraint"); + private static final ParseField TYPE_FIELD = new ParseField("type"); + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_configuration_validation", + true, + args -> new ConfigurationValidation.Builder().setConstraint(args[0]).setType((ConfigurationValidationType) args[1]).build() + ); + + static { + PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return p.text(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.numberValue(); + } + throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + }, CONSTRAINT_FIELD, ObjectParser.ValueType.VALUE); + PARSER.declareField( + constructorArg(), + (p, c) -> ConfigurationValidationType.validationType(p.text()), + TYPE_FIELD, + ObjectParser.ValueType.STRING + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(CONSTRAINT_FIELD.getPreferredName(), constraint); + builder.field(TYPE_FIELD.getPreferredName(), type.toString()); + } + builder.endObject(); + return builder; + } + + public static ConfigurationValidation fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeGenericValue(constraint); + out.writeEnum(type); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == 
null || getClass() != o.getClass()) return false; + ConfigurationValidation that = (ConfigurationValidation) o; + return Objects.equals(constraint, that.constraint) && type == that.type; + } + + @Override + public int hashCode() { + return Objects.hash(constraint, type); + } + + public static class Builder { + + private Object constraint; + private ConfigurationValidationType type; + + public Builder setConstraint(Object constraint) { + this.constraint = constraint; + return this; + } + + public Builder setType(ConfigurationValidationType type) { + this.type = type; + return this; + } + + public ConfigurationValidation build() { + return new ConfigurationValidation(constraint, type); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java new file mode 100644 index 0000000000000..2118014f4a286 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationValidationType.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.configuration; + +import java.util.Locale; + +public enum ConfigurationValidationType { + LESS_THAN, + GREATER_THAN, + LIST_TYPE, + INCLUDED_IN, + REGEX, + UNSET; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static ConfigurationValidationType validationType(String type) { + for (ConfigurationValidationType displayType : ConfigurationValidationType.values()) { + if (displayType.name().equalsIgnoreCase(type)) { + return displayType; + } + } + throw new IllegalArgumentException("Unknown ValidationType: " + type); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 2a302ddb68199..0c6caa3376c7b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -23,6 +23,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; @@ -317,7 +318,7 @@ public ConnectorSyncJob(StreamInput in) throws IOException { .setLanguage((String) args[i++]) .setPipeline((ConnectorIngestPipeline) args[i++]) .setServiceType((String) args[i++]) - .setConfiguration((Map) args[i++]) + .setConfiguration((Map) args[i++]) .build(); } ); diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 5e1686dde80f2..6a7aec2fc7430 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -29,6 +29,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; @@ -282,7 +283,9 @@ public void onResponse(GetResponse response) { .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) - .setConfiguration((Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName())) + .setConfiguration( + (Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName()) + ) .build(); listener.onResponse(syncJobConnectorInfo); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java new file mode 100644 index 0000000000000..44d9c0fcf9e76 --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ConnectorConfigurationTests extends ESTestCase { + + private NamedWriteableRegistry namedWriteableRegistry; + + @Before + public void registerNamedObjects() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList()); + + List namedWriteables = searchModule.getNamedWriteables(); + namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + } + + public final void testRandomSerialization() throws IOException { + for (int runs = 0; runs < 10; runs++) { + ConnectorConfiguration testInstance = ConnectorTestUtils.getRandomConnectorConfigurationField(); + assertTransportSerialization(testInstance); + } + } + + public 
void testToXContent() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "default_value": null, + "depends_on": [ + { + "field": "some_field", + "value": true + } + ], + "display": "textbox", + "label": "Very important field", + "options": [], + "order": 4, + "required": true, + "sensitive": false, + "tooltip": "Wow, this tooltip is useful.", + "type": "str", + "ui_restrictions": [], + "validations": [ + { + "constraint": 0, + "type": "greater_than" + } + ], + "value": "" + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + + private void assertTransportSerialization(ConnectorConfiguration testInstance) throws IOException { + ConnectorConfiguration deserializedInstance = copyInstance(testInstance); + assertNotSame(testInstance, deserializedInstance); + assertThat(testInstance, equalTo(deserializedInstance)); + } + + private ConnectorConfiguration copyInstance(ConnectorConfiguration instance) throws IOException { + return copyWriteable(instance, namedWriteableRegistry, ConnectorConfiguration::new); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 0f2c6c3fa3e8e..ffa532012d982 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; @@ -24,6 +25,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -68,6 +70,29 @@ public void testDeleteConnector() throws Exception { expectThrows(ResourceNotFoundException.class, () -> awaitDeleteConnector(connectorIdToDelete)); } + public void testUpdateConnectorConfiguration() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + Map connectorConfiguration = connector.getConfiguration() + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, entry -> ConnectorTestUtils.getRandomConnectorConfigurationField())); + + UpdateConnectorConfigurationAction.Request updateConfigurationRequest = new UpdateConnectorConfigurationAction.Request( + connector.getConnectorId(), + connectorConfiguration + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorConfiguration(updateConfigurationRequest); + assertThat(updateResponse.status(), 
equalTo(RestStatus.OK)); + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(connectorConfiguration, equalTo(indexedConnector.getConfiguration())); + assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONFIGURED)); + } + public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); DocWriteResponse resp = awaitPutConnector(connector); @@ -290,6 +315,32 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorConfiguration(UpdateConnectorConfigurationAction.Request updateConfiguration) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorConfiguration(updateConfiguration, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update configuration request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update configuration request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorFiltering(UpdateConnectorFilteringAction.Request updateFiltering) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 98d0112d8910f..3488c7d9c8ba7 100644 --- 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -8,6 +8,12 @@ package org.elasticsearch.xpack.application.connector; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationFieldType; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationSelectOption; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidation; +import org.elasticsearch.xpack.application.connector.configuration.ConfigurationValidationType; import org.elasticsearch.xpack.application.connector.filtering.FilteringAdvancedSnippet; import org.elasticsearch.xpack.application.connector.filtering.FilteringPolicy; import org.elasticsearch.xpack.application.connector.filtering.FilteringRule; @@ -19,6 +25,7 @@ import java.time.Instant; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -177,10 +184,51 @@ public static Connector getRandomSyncJobConnectorInfo() { .build(); } + private static ConfigurationDependency getRandomConfigurationDependency() { + return new ConfigurationDependency.Builder().setField(randomAlphaOfLength(10)).setValue(randomAlphaOfLength(10)).build(); + } + + private static ConfigurationSelectOption getRandomConfigurationSelectOption() { + return new ConfigurationSelectOption.Builder().setLabel(randomAlphaOfLength(10)).setValue(randomAlphaOfLength(10)).build(); + } + + private static ConfigurationValidation getRandomConfigurationValidation() { + return new 
ConfigurationValidation.Builder().setConstraint(randomAlphaOfLength(10)) + .setType(getRandomConfigurationValidationType()) + .build(); + } + + public static ConnectorConfiguration getRandomConnectorConfigurationField() { + return new ConnectorConfiguration.Builder().setCategory(randomAlphaOfLength(10)) + .setDefaultValue(randomAlphaOfLength(10)) + .setDependsOn(List.of(getRandomConfigurationDependency())) + .setDisplay(getRandomConfigurationDisplayType()) + .setLabel(randomAlphaOfLength(10)) + .setOptions(List.of(getRandomConfigurationSelectOption(), getRandomConfigurationSelectOption())) + .setOrder(randomInt()) + .setPlaceholder(randomAlphaOfLength(10)) + .setRequired(randomBoolean()) + .setSensitive(randomBoolean()) + .setTooltip(randomAlphaOfLength(10)) + .setType(getRandomConfigurationFieldType()) + .setUiRestrictions(List.of(randomAlphaOfLength(10), randomAlphaOfLength(10))) + .setValidations(List.of(getRandomConfigurationValidation())) + .setValue(randomAlphaOfLength(10)) + .build(); + } + + public static Map getRandomConnectorConfiguration() { + Map configMap = new HashMap<>(); + for (int i = 0; i < 3; i++) { + configMap.put(randomAlphaOfLength(10), getRandomConnectorConfigurationField()); + } + return configMap; + } + public static Connector getRandomConnector() { return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) - .setConfiguration(Collections.emptyMap()) + .setConfiguration(getRandomConnectorConfiguration()) .setCustomScheduling(Map.of(randomAlphaOfLengthBetween(5, 10), getRandomConnectorCustomSchedule())) .setDescription(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setError(randomFrom(new String[] { null, randomAlphaOfLength(10) })) @@ -251,4 +299,19 @@ private static FilteringValidationState getRandomFilteringValidationState() { FilteringValidationState[] values = FilteringValidationState.values(); return values[randomInt(values.length - 1)]; 
} + + private static ConfigurationDisplayType getRandomConfigurationDisplayType() { + ConfigurationDisplayType[] values = ConfigurationDisplayType.values(); + return values[randomInt(values.length - 1)]; + } + + private static ConfigurationFieldType getRandomConfigurationFieldType() { + ConfigurationFieldType[] values = ConfigurationFieldType.values(); + return values[randomInt(values.length - 1)]; + } + + private static ConfigurationValidationType getRandomConfigurationValidationType() { + ConfigurationValidationType[] values = ConfigurationValidationType.values(); + return values[randomInt(values.length - 1)]; + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index a83537f32f413..481f50bb41711 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -49,135 +49,161 @@ public final void testRandomSerialization() throws IOException { public void testToXContent() throws IOException { String content = XContentHelper.stripWhitespace(""" { - "api_key_id": "test", - "connector_id": "test-connector", - "custom_scheduling": { - "schedule-key": { - "configuration_overrides": { - "domain_allowlist": [ - "https://example.com" - ], - "max_crawl_depth": 1, - "seed_urls": [ - "https://example.com/blog", - "https://example.com/info" - ], - "sitemap_discovery_disabled": true, - "sitemap_urls": [ - "https://example.com/sitemap.xml" - ] - }, - "enabled": true, - "interval": "0 0 12 * * ?", - "last_synced": null, - "name": "My Schedule" - } - }, - "configuration": {}, - "description": "test-connector", - "features": { - "document_level_security": { - "enabled": true - }, - "filtering_advanced_config": true, - "sync_rules": { - 
"advanced": { - "enabled": false + "api_key_id":"test", + "connector_id":"test-connector", + "custom_scheduling":{ + "schedule-key":{ + "configuration_overrides":{ + "domain_allowlist":[ + "https://example.com" + ], + "max_crawl_depth":1, + "seed_urls":[ + "https://example.com/blog", + "https://example.com/info" + ], + "sitemap_discovery_disabled":true, + "sitemap_urls":[ + "https://example.com/sitemap.xml" + ] + }, + "enabled":true, + "interval":"0 0 12 * * ?", + "last_synced":null, + "name":"My Schedule" + } + }, + "configuration":{ + "some_field":{ + "default_value":null, + "depends_on":[ + { + "field":"some_field", + "value":true + } + ], + "display":"textbox", + "label":"Very important field", + "options":[], + "order":4, + "required":true, + "sensitive":false, + "tooltip":"Wow, this tooltip is useful.", + "type":"str", + "ui_restrictions":[], + "validations":[ + { + "constraint":0, + "type":"greater_than" + } + ], + "value":"" + } + }, + "description":"test-connector", + "features":{ + "document_level_security":{ + "enabled":true + }, + "filtering_advanced_config":true, + "sync_rules":{ + "advanced":{ + "enabled":false + }, + "basic":{ + "enabled":true + } + } + }, + "filtering":[ + { + "active":{ + "advanced_snippet":{ + "created_at":"2023-11-09T15:13:08.231Z", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":{} }, - "basic": { - "enabled": true + "rules":[ + { + "created_at":"2023-11-09T15:13:08.231Z", + "field":"_", + "id":"DEFAULT", + "order":0, + "policy":"include", + "rule":"regex", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":".*" + } + ], + "validation":{ + "errors":[], + "state":"valid" } - } - }, - "filtering": [ - { - "active": { - "advanced_snippet": { - "created_at": "2023-11-09T15:13:08.231Z", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-11-09T15:13:08.231Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": 
"2023-11-09T15:13:08.231Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + }, + "domain":"DEFAULT", + "draft":{ + "advanced_snippet":{ + "created_at":"2023-11-09T15:13:08.231Z", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":{} }, - "domain": "DEFAULT", - "draft": { - "advanced_snippet": { - "created_at": "2023-11-09T15:13:08.231Z", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": {} - }, - "rules": [ - { - "created_at": "2023-11-09T15:13:08.231Z", - "field": "_", - "id": "DEFAULT", - "order": 0, - "policy": "include", - "rule": "regex", - "updated_at": "2023-11-09T15:13:08.231Z", - "value": ".*" - } - ], - "validation": { - "errors": [], - "state": "valid" - } + "rules":[ + { + "created_at":"2023-11-09T15:13:08.231Z", + "field":"_", + "id":"DEFAULT", + "order":0, + "policy":"include", + "rule":"regex", + "updated_at":"2023-11-09T15:13:08.231Z", + "value":".*" + } + ], + "validation":{ + "errors":[], + "state":"valid" } - } - ], - "index_name": "search-test", - "is_native": true, - "language": "polish", - "last_access_control_sync_error": "some error", - "last_access_control_sync_scheduled_at": "2023-11-09T15:13:08.231Z", - "last_access_control_sync_status": "pending", - "last_deleted_document_count": 42, - "last_incremental_sync_scheduled_at": "2023-11-09T15:13:08.231Z", - "last_indexed_document_count": 42, - "last_seen": "2023-11-09T15:13:08.231Z", - "last_sync_error": "some error", - "last_sync_scheduled_at": "2024-11-09T15:13:08.231Z", - "last_sync_status": "completed", - "last_synced": "2024-11-09T15:13:08.231Z", - "name": "test-name", - "pipeline": { - "extract_binary_content": true, - "name": "ent-search-generic-ingestion", - "reduce_whitespace": true, - "run_ml_inference": false - }, - "scheduling": { - "access_control": { - "enabled": false, - "interval": "0 0 0 * * ?" - }, - "full": { - "enabled": false, - "interval": "0 0 0 * * ?" - }, - "incremental": { - "enabled": false, - "interval": "0 0 0 * * ?" 
- } - }, - "service_type": "google_drive", - "status": "needs_configuration", - "sync_now": false + } + } + ], + "index_name":"search-test", + "is_native":true, + "language":"polish", + "last_access_control_sync_error":"some error", + "last_access_control_sync_scheduled_at":"2023-11-09T15:13:08.231Z", + "last_access_control_sync_status":"pending", + "last_deleted_document_count":42, + "last_incremental_sync_scheduled_at":"2023-11-09T15:13:08.231Z", + "last_indexed_document_count":42, + "last_seen":"2023-11-09T15:13:08.231Z", + "last_sync_error":"some error", + "last_sync_scheduled_at":"2024-11-09T15:13:08.231Z", + "last_sync_status":"completed", + "last_synced":"2024-11-09T15:13:08.231Z", + "name":"test-name", + "pipeline":{ + "extract_binary_content":true, + "name":"ent-search-generic-ingestion", + "reduce_whitespace":true, + "run_ml_inference":false + }, + "scheduling":{ + "access_control":{ + "enabled":false, + "interval":"0 0 0 * * ?" + }, + "full":{ + "enabled":false, + "interval":"0 0 0 * * ?" + }, + "incremental":{ + "enabled":false, + "interval":"0 0 0 * * ?" + } + }, + "service_type":"google_drive", + "status":"needs_configuration", + "sync_now":false }"""); Connector connector = Connector.fromXContentBytes(new BytesArray(content), XContentType.JSON); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..7e8b026a23105 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionRequestBWCSerializingTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorConfigurationActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorConfigurationAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorConfigurationAction.Request::new; + } + + @Override + protected UpdateConnectorConfigurationAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorConfigurationAction.Request(connectorId, ConnectorTestUtils.getRandomConnectorConfiguration()); + } + + @Override + protected UpdateConnectorConfigurationAction.Request mutateInstance(UpdateConnectorConfigurationAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorConfigurationAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorConfigurationAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorConfigurationAction.Request mutateInstanceForVersion( + UpdateConnectorConfigurationAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d4aa4f12b36d3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorConfigurationActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorConfigurationAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorConfigurationAction.Response::new; + } + + @Override + protected UpdateConnectorConfigurationAction.Response createTestInstance() { + return new UpdateConnectorConfigurationAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorConfigurationAction.Response mutateInstance(UpdateConnectorConfigurationAction.Response instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorConfigurationAction.Response mutateInstanceForVersion( + UpdateConnectorConfigurationAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 3409f549cb579..44399e84ee6e0 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -127,6 +127,7 @@ public class Constants { "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", "cluster:admin/xpack/connector/put", + "cluster:admin/xpack/connector/update_configuration", "cluster:admin/xpack/connector/update_error", "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", From f6fcfe51d0fb6fe801f29a3025dacb9752c97f3f Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Mon, 4 Dec 2023 17:32:08 -0500 Subject: [PATCH 192/263] [ML] If trained model download task is in progress, wait for it to finish before executing start trained model deployment (#102944) --- docs/changelog/102944.yaml | 6 + .../xpack/core/ml/job/messages/Messages.java | 2 + .../TransportGetTrainedModelsAction.java | 1 + ...portStartTrainedModelDeploymentAction.java | 289 +++++++++++------- .../test/ml/3rd_party_deployment.yml | 70 +++-- 5 files changed, 243 insertions(+), 125 deletions(-) create mode 100644 docs/changelog/102944.yaml diff --git a/docs/changelog/102944.yaml b/docs/changelog/102944.yaml new file mode 100644 index 0000000000000..58a1bb8f6bbaa --- /dev/null +++ b/docs/changelog/102944.yaml @@ -0,0 +1,6 @@ +pr: 102944 +summary: "If trained model download task is in progress, wait for it to finish before\ + \ executing start trained model deployment" +area: Machine Learning +type: 
bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index ce26bb53dc223..36b4c0f1815ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -123,6 +123,8 @@ public final class Messages { "Unable to delete model [{0}] as it is required by machine learning"; public static final String MODEL_DEFINITION_TRUNCATED = "Model definition truncated. Unable to deserialize trained model definition [{0}]"; + public static final String MODEL_DOWNLOAD_IN_PROGRESS = + "Model download task is currently running. Wait for trained model [{0}] download task to complete then try again"; public static final String UNABLE_TO_DEPLOY_MODEL_BAD_PARTS = "Unable to deploy model, please delete and recreate the model definition"; public static final String INFERENCE_FAILED_TO_DESERIALIZE = "Could not deserialize trained model [{0}]"; public static final String INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index cda76d3c3ee1d..e6d1fe30d7646 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -89,6 +89,7 @@ protected void doExecute(Task task, Request request, ActionListener li new OriginSettingClient(client, ML_ORIGIN), configs.get(0), false, // missing docs are not an error + null, // if download is in progress, don't wait for it to complete ActionListener.wrap(modelIdAndLength -> { 
configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); listener.onResponse(responseBuilder.setModels(configs).build()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 113a093b3ae65..4a569b374582a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.ml.action; @@ -13,6 +15,7 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -64,6 +67,7 @@ import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; +import org.elasticsearch.xpack.ml.utils.TaskRetriever; import java.util.Collection; import java.util.HashMap; @@ -269,7 +273,13 @@ protected void masterOperation( error -> { if (ExceptionsHelper.unwrapCause(error) instanceof ResourceNotFoundException) { // no name clash, continue with the deployment - checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, modelSizeListener); + checkFullModelDefinitionIsPresent( + 
client, + trainedModelConfig, + true, + request.getTimeout(), + modelSizeListener + ); } else { listener.onFailure(error); } @@ -277,7 +287,7 @@ protected void masterOperation( ) ); } else { - checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, modelSizeListener); + checkFullModelDefinitionIsPresent(client, trainedModelConfig, true, request.getTimeout(), modelSizeListener); } }, listener::onFailure); @@ -342,27 +352,30 @@ private void deleteFailedDeployment( * individual per-document definition lengths and checking * the total is equal to the total definition length as * stored in the docs. - * + *

    * On success the response is a tuple * {@code (model id, total definition length)} - * + *

    * If {@code errorIfDefinitionIsMissing == false} and some * definition docs are missing then {@code listener::onResponse} * is called with the total definition length == 0. * This usage is to answer yes/no questions if the full model * definition is present. * - * @param mlOriginClient A client using ML_ORIGIN - * @param config trained model config + * @param mlOriginClient A client using ML_ORIGIN + * @param config trained model config * @param errorIfDefinitionIsMissing If true missing definition parts cause errors. * If false and some parts are missing the total * definition length in the response is set to 0. - * @param listener response listener + * @param timeout The timeout value in seconds that the request should fail if it does not complete. + * If null, then this will not wait for the download to complete before returning. + * @param listener response listener */ static void checkFullModelDefinitionIsPresent( OriginSettingClient mlOriginClient, TrainedModelConfig config, boolean errorIfDefinitionIsMissing, + TimeValue timeout, ActionListener> listener ) { if (config.getLocation() instanceof IndexLocation == false) { @@ -373,27 +386,86 @@ static void checkFullModelDefinitionIsPresent( final String modelId = config.getModelId(); String index = ((IndexLocation) config.getLocation()).getIndexName(); - mlOriginClient.prepareSearch(index) - .setQuery( - QueryBuilders.constantScoreQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId)) - .filter( - QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelDefinitionDoc.NAME) - ) - ) - ) - .setFetchSource(false) - .addDocValueField(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()) - .addDocValueField(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) - .setSize(MAX_NUM_NATIVE_DEFINITION_PARTS) - .setTrackTotalHits(true) - 
.addSort(SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName()).order(SortOrder.ASC).unmappedType("long")) - .execute(ActionListener.wrap(response -> { - SearchHit[] hits = response.getHits().getHits(); - if (hits.length == 0) { + + // Step 3 + // Verify the model definition parts are all present + ActionListener step3SearchResultsVerificationListener = step3VerifyModelPartsArePresent( + errorIfDefinitionIsMissing, + listener, + modelId + ); + + // Step 2 + // Search for the model definition parts to ensure they are all present + ActionListener step2DocsSearchListener = step2SearchForModelParts( + mlOriginClient, + listener, + index, + modelId, + step3SearchResultsVerificationListener + ); + + // Step 1 (there is no step zero) + // Check if there is a download task for this model, and wait for it to complete or timeout + step1CheckForDownloadTask(mlOriginClient, errorIfDefinitionIsMissing, timeout, listener, modelId, step2DocsSearchListener); + } + + private static ActionListener step3VerifyModelPartsArePresent( + boolean errorIfDefinitionIsMissing, + ActionListener> listener, + String modelId + ) { + return ActionListener.wrap(response -> { + SearchHit[] hits = response.getHits().getHits(); + if (hits.length == 0) { + failOrRespondWith0( + () -> new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)), + errorIfDefinitionIsMissing, + modelId, + listener + ); + return; + } + + long firstTotalLength; + DocumentField firstTotalLengthField = hits[0].field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + if (firstTotalLengthField != null && firstTotalLengthField.getValue() instanceof Long firstTotalDefinitionLength) { + firstTotalLength = firstTotalDefinitionLength; + } else { + failOrRespondWith0( + () -> missingFieldsError( + modelId, + hits[0].getId(), + List.of(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + ), + errorIfDefinitionIsMissing, + modelId, + 
listener + ); + return; + } + + Set missingFields = new HashSet<>(); + long summedLengths = 0; + for (SearchHit hit : hits) { + long totalLength = -1; + DocumentField totalLengthField = hit.field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + if (totalLengthField != null && totalLengthField.getValue() instanceof Long totalDefinitionLength) { + totalLength = totalDefinitionLength; + } else { + missingFields.add(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); + } + + DocumentField definitionLengthField = hit.field(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); + if (definitionLengthField != null && definitionLengthField.getValue() instanceof Long definitionLength) { + summedLengths += definitionLength; + } else { + missingFields.add(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); + } + + if (missingFields.isEmpty() == false) { failOrRespondWith0( - () -> new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)), + () -> missingFieldsError(modelId, hit.getId(), missingFields), errorIfDefinitionIsMissing, modelId, listener @@ -401,16 +473,18 @@ static void checkFullModelDefinitionIsPresent( return; } - long firstTotalLength; - DocumentField firstTotalLengthField = hits[0].field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - if (firstTotalLengthField != null && firstTotalLengthField.getValue() instanceof Long firstTotalDefinitionLength) { - firstTotalLength = firstTotalDefinitionLength; - } else { + if (totalLength != firstTotalLength) { + final long finalTotalLength = totalLength; failOrRespondWith0( - () -> missingFieldsError( + () -> ExceptionsHelper.badRequestException( + "[{}] [total_definition_length] must be the same in all model definition parts. " + + "The value [{}] in model definition part [{}] does not match the value [{}] in part [{}]. 
" + + Messages.UNABLE_TO_DEPLOY_MODEL_BAD_PARTS, modelId, - hits[0].getId(), - List.of(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + finalTotalLength, + TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hit.getId())), + firstTotalLength, + TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hits[0].getId())) ), errorIfDefinitionIsMissing, modelId, @@ -419,76 +493,87 @@ static void checkFullModelDefinitionIsPresent( return; } - Set missingFields = new HashSet<>(); - long summedLengths = 0; - for (SearchHit hit : hits) { - long totalLength = -1; - DocumentField totalLengthField = hit.field(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - if (totalLengthField != null && totalLengthField.getValue() instanceof Long totalDefinitionLength) { - totalLength = totalDefinitionLength; - } else { - missingFields.add(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()); - } - - DocumentField definitionLengthField = hit.field(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); - if (definitionLengthField != null && definitionLengthField.getValue() instanceof Long definitionLength) { - summedLengths += definitionLength; - } else { - missingFields.add(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()); - } + } + if (summedLengths != firstTotalLength) { + failOrRespondWith0( + () -> ExceptionsHelper.badRequestException(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId)), + errorIfDefinitionIsMissing, + modelId, + listener + ); + return; + } + listener.onResponse(new Tuple<>(modelId, summedLengths)); + }, e -> { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + failOrRespondWith0(() -> { + Exception ex = new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)); + ex.addSuppressed(e); + return ex; + }, errorIfDefinitionIsMissing, modelId, listener); + } else { + 
listener.onFailure(e); + } + }); + } - if (missingFields.isEmpty() == false) { - failOrRespondWith0( - () -> missingFieldsError(modelId, hit.getId(), missingFields), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } + private static ActionListener step2SearchForModelParts( + OriginSettingClient mlOriginClient, + ActionListener> listener, + String index, + String modelId, + ActionListener nextStepListener + ) { + return ActionListener.wrap(r -> { + mlOriginClient.prepareSearch(index) + .setQuery( + QueryBuilders.constantScoreQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(TrainedModelConfig.MODEL_ID.getPreferredName(), modelId)) + .filter( + QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelDefinitionDoc.NAME) + ) + ) + ) + .setFetchSource(false) + .addDocValueField(TrainedModelDefinitionDoc.DEFINITION_LENGTH.getPreferredName()) + .addDocValueField(TrainedModelDefinitionDoc.TOTAL_DEFINITION_LENGTH.getPreferredName()) + .setSize(MAX_NUM_NATIVE_DEFINITION_PARTS) + .setTrackTotalHits(true) + .addSort( + SortBuilders.fieldSort(TrainedModelDefinitionDoc.DOC_NUM.getPreferredName()).order(SortOrder.ASC).unmappedType("long") + ) + .execute(nextStepListener); - if (totalLength != firstTotalLength) { - final long finalTotalLength = totalLength; - failOrRespondWith0( - () -> ExceptionsHelper.badRequestException( - "[{}] [total_definition_length] must be the same in all model definition parts. " - + "The value [{}] in model definition part [{}] does not match the value [{}] in part [{}]. 
" - + Messages.UNABLE_TO_DEPLOY_MODEL_BAD_PARTS, - modelId, - finalTotalLength, - TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hit.getId())), - firstTotalLength, - TrainedModelDefinitionDoc.docNum(modelId, Objects.requireNonNull(hits[0].getId())) - ), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } + }, listener::onFailure); + } - } - if (summedLengths != firstTotalLength) { - failOrRespondWith0( - () -> ExceptionsHelper.badRequestException(Messages.getMessage(Messages.MODEL_DEFINITION_TRUNCATED, modelId)), - errorIfDefinitionIsMissing, - modelId, - listener - ); - return; - } - listener.onResponse(new Tuple<>(modelId, summedLengths)); - }, e -> { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - failOrRespondWith0(() -> { - Exception ex = new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId)); - ex.addSuppressed(e); - return ex; - }, errorIfDefinitionIsMissing, modelId, listener); - } else { - listener.onFailure(e); - } - })); + /* + @param timeout null value indicates that the request should not wait for the download to complete before returning + */ + private static void step1CheckForDownloadTask( + OriginSettingClient mlOriginClient, + boolean errorIfDefinitionIsMissing, + TimeValue timeout, + ActionListener> failureListener, + String modelId, + ActionListener nextStepListener + ) { + TaskRetriever.getDownloadTaskInfo(mlOriginClient, modelId, timeout != null, ActionListener.wrap(taskInfo -> { + if (taskInfo == null) { + nextStepListener.onResponse(null); + } else { + failOrRespondWith0( + () -> new ElasticsearchStatusException( + Messages.getMessage(Messages.MODEL_DOWNLOAD_IN_PROGRESS, modelId), + RestStatus.REQUEST_TIMEOUT + ), + errorIfDefinitionIsMissing, + modelId, + failureListener + ); + } + }, failureListener::onFailure), timeout); } private static void failOrRespondWith0( diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 1fa675ff4284f..2b9bc06bdd1d0 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -153,11 +153,11 @@ setup: - do: ml.get_trained_models: model_id: "bert_fill_mask_model" - - match: {trained_model_configs.0.inference_config.fill_mask.mask_token: "[MASK]"} + - match: { trained_model_configs.0.inference_config.fill_mask.mask_token: "[MASK]" } - do: ml.get_trained_models: model_id: "roberta_fill_mask_model" - - match: {trained_model_configs.0.inference_config.fill_mask.mask_token: ""} + - match: { trained_model_configs.0.inference_config.fill_mask.mask_token: "" } - do: catch: /IllegalArgumentException. Mask token requested was \[\] but must be \[\[MASK\]\] for this model/ ml.put_trained_model: @@ -215,6 +215,30 @@ setup: catch: /Could not find trained model definition \[distilbert-finetuned-sst\]/ ml.start_trained_model_deployment: model_id: distilbert-finetuned-sst + +--- +"Test start deployment fails while model download in progress": + + - do: + ml.put_trained_model: + model_id: .elser_model_2 + body: > + { + "input": { + "field_names": ["text_field"] + } + } + - do: + catch: /Model download task is currently running\. 
Wait for trained model \[.elser_model_2\] download task to complete then try again/ + ml.start_trained_model_deployment: + model_id: .elser_model_2 + - do: + ml.delete_trained_model: + model_id: .elser_model_2 + - do: + catch: /No known trained model with model_id \[.elser_model_2\]/ + ml.start_trained_model_deployment: + model_id: .elser_model_2 --- "Test start and stop deployment with no cache": - do: @@ -222,9 +246,9 @@ setup: model_id: test_model cache_size: 0 wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: "0"} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: "0" } - do: ml.stop_trained_model_deployment: @@ -240,9 +264,9 @@ setup: model_id: test_model cache_size: 10kb wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: 10kb} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: 10kb } - do: allowed_warnings: @@ -380,9 +404,9 @@ setup: deployment_id: test_model_deployment_cache_test cache_size: 10kb wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.cache_size: 10kb} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.cache_size: 10kb } - do: allowed_warnings: @@ -535,7 +559,7 @@ setup: model_id: test_model deployment_id: test_model_for_search wait_for: started - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - 
do: ml.stop_trained_model_deployment: @@ -565,9 +589,9 @@ setup: deployment_id: test_model_for_search priority: low wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model_for_search} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model_for_search } - do: allowed_warnings: - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' @@ -586,9 +610,9 @@ setup: deployment_id: test_model_for_ingest priority: low wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model_for_ingest} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model_for_ingest } - do: allowed_warnings: - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' 
@@ -625,7 +649,7 @@ setup: model_id: test_model deployment_id: test_model_deployment wait_for: started - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - do: catch: /Could not start model deployment because an existing deployment with the same id \[test_model_deployment\] exist/ @@ -648,9 +672,9 @@ setup: ml.start_trained_model_deployment: model_id: test_model wait_for: started - - match: {assignment.assignment_state: started} - - match: {assignment.task_parameters.model_id: test_model} - - match: {assignment.task_parameters.deployment_id: test_model} + - match: { assignment.assignment_state: started } + - match: { assignment.task_parameters.model_id: test_model } + - match: { assignment.task_parameters.deployment_id: test_model } --- "Test cannot create model with a deployment Id": @@ -659,7 +683,7 @@ setup: model_id: test_model wait_for: started deployment_id: test_model_deployment - - match: {assignment.assignment_state: started} + - match: { assignment.assignment_state: started } - do: catch: /Cannot create model \[test_model_deployment\] the id is the same as an current model deployment/ From b69f78da5fd5b468be1a4b8c892558231db8bee5 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 4 Dec 2023 17:40:23 -0500 Subject: [PATCH 193/263] Optimize _count type API requests (#102888) When calling `_count` or simply `_search?size=0&track_total_hits=true` with no aggregations, we end up doing too much work on the coordinator, allocating large arrays and keeping unnecessary references to various objects. This commit optimizes this scenario by only collecting the count and allowing result references to be collected. 
--- docs/changelog/102888.yaml | 5 + .../rest-api-spec/test/count/30_min_score.yml | 42 +++++++ .../search/simple/SimpleSearchIT.java | 32 +++++ .../CountOnlyQueryPhaseResultConsumer.java | 118 ++++++++++++++++++ .../action/search/CountedCollector.java | 4 +- .../action/search/DfsQueryPhase.java | 8 +- .../SearchDfsQueryThenFetchAsyncAction.java | 5 +- .../action/search/SearchPhaseController.java | 15 ++- .../SearchQueryThenFetchAsyncAction.java | 2 +- .../action/search/TransportSearchAction.java | 3 +- .../org/elasticsearch/common/util/Maps.java | 13 ++ .../action/search/DfsQueryPhaseTests.java | 12 +- .../action/search/FetchSearchPhaseTests.java | 13 +- .../search/SearchPhaseControllerTests.java | 22 ++-- 14 files changed, 260 insertions(+), 34 deletions(-) create mode 100644 docs/changelog/102888.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml create mode 100644 server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java diff --git a/docs/changelog/102888.yaml b/docs/changelog/102888.yaml new file mode 100644 index 0000000000000..79ea9cbe712de --- /dev/null +++ b/docs/changelog/102888.yaml @@ -0,0 +1,5 @@ +pr: 102888 +summary: "Optimize `_count` type API requests" +area: Search +type: enhancement +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml new file mode 100644 index 0000000000000..278a7095add5e --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/30_min_score.yml @@ -0,0 +1,42 @@ +--- +"count with min_score": + - do: + indices.create: + index: test_count_min_score + + - do: + index: + index: test_count_min_score + id: "1" + body: { field: foo bar } + + - do: + index: + index: test_count_min_score + id: "2" + body: { field: foo bar bar bar bar } + + - do: + indices.refresh: + index: 
[test_count_min_score] + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.2 + - match: {count : 1} + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.1 + - match: { count: 2 } + + - do: + count: + index: test_count_min_score + q: field:foo + min_score: 0.5 + - match: { count: 0 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index f47303b83b6e3..cb13fca85541f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -493,6 +494,37 @@ public void testTooLongRegexInRegexpQuery() throws Exception { ); } + public void testStrictlyCountRequest() throws Exception { + createIndex("test_count_1"); + indexRandom( + true, + prepareIndex("test_count_1").setId("1").setSource("field", "value"), + prepareIndex("test_count_1").setId("2").setSource("field", "value"), + prepareIndex("test_count_1").setId("3").setSource("field", "value"), + prepareIndex("test_count_1").setId("4").setSource("field", "value"), + prepareIndex("test_count_1").setId("5").setSource("field", "value"), + prepareIndex("test_count_1").setId("6").setSource("field", "value") + ); + + createIndex("test_count_2"); + indexRandom( + true, + prepareIndex("test_count_2").setId("1").setSource("field", "value_2"), + 
prepareIndex("test_count_2").setId("2").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("3").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("4").setSource("field", "value_2"), + prepareIndex("test_count_2").setId("6").setSource("field", "value_2") + ); + assertNoFailuresAndResponse( + prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getHits().length, equalTo(0)); + } + ); + + } + private void assertWindowFails(SearchRequestBuilder search) { SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> search.get()); assertThat( diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java new file mode 100644 index 0000000000000..1e67522f6a671 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.search; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; + +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Stream; + +/** + * Optimized phase result consumer that only counts the number of hits and does not + * store any other information. + */ +class CountOnlyQueryPhaseResultConsumer extends SearchPhaseResults { + AtomicReference relationAtomicReference = new AtomicReference<>(TotalHits.Relation.EQUAL_TO); + LongAdder totalHits = new LongAdder(); + + private final AtomicBoolean terminatedEarly = new AtomicBoolean(false); + private final AtomicBoolean timedOut = new AtomicBoolean(false); + private final Set results; + private final SearchProgressListener progressListener; + + CountOnlyQueryPhaseResultConsumer(SearchProgressListener progressListener, int numShards) { + super(numShards); + this.progressListener = progressListener; + this.results = Collections.newSetFromMap(Maps.newConcurrentHashMapWithExpectedSize(numShards)); + } + + @Override + Stream getSuccessfulResults() { + return Stream.empty(); + } + + @Override + public void consumeResult(SearchPhaseResult result, Runnable next) { + assert results.contains(result.getShardIndex()) == false : "shardIndex: " + result.getShardIndex() + " is already set"; + results.add(result.getShardIndex()); + // set the relation to the first non-equal relation + relationAtomicReference.compareAndSet(TotalHits.Relation.EQUAL_TO, result.queryResult().getTotalHits().relation); + totalHits.add(result.queryResult().getTotalHits().value); + terminatedEarly.compareAndSet(false, (result.queryResult().terminatedEarly() != null && 
result.queryResult().terminatedEarly())); + timedOut.compareAndSet(false, result.queryResult().searchTimedOut()); + progressListener.notifyQueryResult(result.getShardIndex(), result.queryResult()); + next.run(); + } + + @Override + boolean hasResult(int shardIndex) { + return results.contains(shardIndex); + } + + @Override + public SearchPhaseController.ReducedQueryPhase reduce() throws Exception { + SearchPhaseController.ReducedQueryPhase reducePhase = new SearchPhaseController.ReducedQueryPhase( + new TotalHits(totalHits.sum(), relationAtomicReference.get()), + 0, + Float.NaN, + timedOut.get(), + terminatedEarly.get(), + null, + null, + null, + SearchPhaseController.SortedTopDocs.EMPTY, + null, + null, + 1, + 0, + 0, + false + ); + if (progressListener != SearchProgressListener.NOOP) { + progressListener.notifyFinalReduce( + List.of(), + reducePhase.totalHits(), + reducePhase.aggregations(), + reducePhase.numReducePhases() + ); + } + return reducePhase; + } + + @Override + AtomicArray getAtomicArray() { + return new AtomicArray<>(0); + } + + @Override + public void incRef() {} + + @Override + public boolean tryIncRef() { + return true; + } + + @Override + public boolean decRef() { + return true; + } + + @Override + public boolean hasReferences() { + return false; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java index d5605b280f385..3a12b72570caf 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java @@ -18,12 +18,12 @@ * where the given index is used to set the result on the array. 
*/ final class CountedCollector { - private final ArraySearchPhaseResults resultConsumer; + private final SearchPhaseResults resultConsumer; private final CountDown counter; private final Runnable onFinish; private final SearchPhaseContext context; - CountedCollector(ArraySearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { + CountedCollector(SearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { this.resultConsumer = resultConsumer; resultConsumer.incRef(); this.counter = new CountDown(expectedOps); diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index ce2c86be4b4e6..54408cd560314 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -37,11 +37,11 @@ * @see CountedCollector#onFailure(int, SearchShardTarget, Exception) */ final class DfsQueryPhase extends SearchPhase { - private final QueryPhaseResultConsumer queryResult; + private final SearchPhaseResults queryResult; private final List searchResults; private final AggregatedDfs dfs; private final List knnResults; - private final Function, SearchPhase> nextPhaseFactory; + private final Function, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final SearchTransportService searchTransportService; private final SearchProgressListener progressListener; @@ -50,8 +50,8 @@ final class DfsQueryPhase extends SearchPhase { List searchResults, AggregatedDfs dfs, List knnResults, - QueryPhaseResultConsumer queryResult, - Function, SearchPhase> nextPhaseFactory, + SearchPhaseResults queryResult, + Function, SearchPhase> nextPhaseFactory, SearchPhaseContext context ) { super("dfs_query"); diff --git 
a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 2fcb792f821c9..68d1bec590318 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsKnnResults; @@ -26,7 +27,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction { - private final QueryPhaseResultConsumer queryPhaseResultConsumer; + private final SearchPhaseResults queryPhaseResultConsumer; private final SearchProgressListener progressListener; SearchDfsQueryThenFetchAsyncAction( @@ -36,7 +37,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction final Map aliasFilter, final Map concreteIndexBoosts, final Executor executor, - final QueryPhaseResultConsumer queryPhaseResultConsumer, + final SearchPhaseResults queryPhaseResultConsumer, final SearchRequest request, final ActionListener listener, final GroupShardsIterator shardsIts, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index e262003935969..d4808def29d1f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -783,7 +783,7 @@ AggregationReduceContext.Builder getReduceContext(Supplier isCanceled, 
/** * Returns a new {@link QueryPhaseResultConsumer} instance that reduces search responses incrementally. */ - QueryPhaseResultConsumer newSearchPhaseResults( + SearchPhaseResults newSearchPhaseResults( Executor executor, CircuitBreaker circuitBreaker, Supplier isCanceled, @@ -792,6 +792,19 @@ QueryPhaseResultConsumer newSearchPhaseResults( int numShards, Consumer onPartialMergeFailure ) { + final int size = request.source() == null || request.source().size() == -1 ? SearchService.DEFAULT_SIZE : request.source().size(); + // Use CountOnlyQueryPhaseResultConsumer for requests without aggs, suggest, etc. things only wanting a total count and + // returning no hits + if (size == 0 + && (request.source() == null + || (request.source().aggregations() == null + && request.source().suggest() == null + && request.source().rankBuilder() == null + && request.source().knnSearch().isEmpty() + && request.source().profile() == false)) + && request.resolveTrackTotalHitsUpTo() == SearchContext.TRACK_TOTAL_HITS_ACCURATE) { + return new CountOnlyQueryPhaseResultConsumer(listener, numShards); + } return new QueryPhaseResultConsumer( request, executor, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 8cf4ee9b75f76..51d330f55aee1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -43,7 +43,7 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction aliasFilter, final Map concreteIndexBoosts, final Executor executor, - final QueryPhaseResultConsumer resultConsumer, + final SearchPhaseResults resultConsumer, final SearchRequest request, final ActionListener listener, final GroupShardsIterator shardsIts, diff --git 
a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 9010fa1ea0e75..1b3b321a530e6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -60,6 +60,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -1286,7 +1287,7 @@ public SearchPhase newSearchPhase( && task.getProgressListener() == SearchProgressListener.NOOP) { task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); } - final QueryPhaseResultConsumer queryResultConsumer = searchPhaseController.newSearchPhaseResults( + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( executor, circuitBreaker, task::isCancelled, diff --git a/server/src/main/java/org/elasticsearch/common/util/Maps.java b/server/src/main/java/org/elasticsearch/common/util/Maps.java index da5089983ceb5..1b46e71dadd12 100644 --- a/server/src/main/java/org/elasticsearch/common/util/Maps.java +++ b/server/src/main/java/org/elasticsearch/common/util/Maps.java @@ -18,6 +18,7 @@ import java.util.Objects; import java.util.Set; import java.util.TreeMap; +import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collector; @@ -281,6 +282,18 @@ public static Map newHashMapWithExpectedSize(int expectedSize) { return new HashMap<>(capacity(expectedSize)); } + /** + * Returns a concurrent hash map with a capacity 
sufficient to keep expectedSize elements without being resized. + * + * @param expectedSize the expected amount of elements in the map + * @param the key type + * @param the value type + * @return a new pre-sized {@link HashMap} + */ + public static Map newConcurrentHashMapWithExpectedSize(int expectedSize) { + return new ConcurrentHashMap<>(capacity(expectedSize)); + } + /** * Returns a linked hash map with a capacity sufficient to keep expectedSize elements without being resized. * diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 21c1e9b0470b5..b14d24cf95f62 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -125,7 +125,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -138,7 +138,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); @@ -211,7 +211,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); 
mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -224,7 +224,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); @@ -299,7 +299,7 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -312,7 +312,7 @@ public void sendExecuteQuery( DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(response.results); + responseRef.set(((QueryPhaseResultConsumer) response).results); } }, mockSearchPhaseContext); assertEquals("dfs_query", phase.getName()); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 3d66c4bc2793f..24b2610c8d190 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; @@ -49,7 +50,7 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() { SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -139,7 +140,7 @@ private void assertProfiles(boolean profiled, int totalShards, SearchResponse se public void testFetchTwoDocument() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -256,7 +257,7 @@ public void run() { public void testFailFetchOneDoc() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = 
controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -384,7 +385,7 @@ public void testFetchDocsConcurrently() throws InterruptedException { boolean profiled = randomBoolean(); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -495,7 +496,7 @@ public void run() { public void testExceptionFailsPhase() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -606,7 +607,7 @@ public void run() { public void testCleanupIrrelevantContexts() { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - QueryPhaseResultConsumer results = controller.newSearchPhaseResults( + SearchPhaseResults results = controller.newSearchPhaseResults( EsExecutors.DIRECT_EXECUTOR_SERVICE, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java 
index 0dcb6abe3a86e..cd86a2e4f55d6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -609,7 +609,7 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -734,7 +734,7 @@ public void testConsumerConcurrently() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -807,7 +807,7 @@ public void testConsumerOnlyAggs() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")).size(0)); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -869,7 +869,7 @@ public void testConsumerOnlyHits() throws Exception { request.source(new SearchSourceBuilder().size(randomIntBetween(1, 10))); } request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + 
SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -936,7 +936,7 @@ public void testReduceTopNWithFromOffset() throws Exception { SearchRequest request = new SearchRequest(); request.source(new SearchSourceBuilder().size(5).from(5)); request.setBatchedReduceSize(randomIntBetween(2, 4)); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -994,7 +994,7 @@ public void testConsumerSortByField() throws Exception { SearchRequest request = randomSearchRequest(); int size = randomIntBetween(1, 10); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1050,7 +1050,7 @@ public void testConsumerFieldCollapsing() throws Exception { SearchRequest request = randomSearchRequest(); int size = randomIntBetween(5, 10); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1109,7 +1109,7 @@ public void testConsumerSuggestions() throws Exception { int bufferSize = randomIntBetween(2, 200); SearchRequest request = randomSearchRequest(); request.setBatchedReduceSize(bufferSize); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1256,7 +1256,7 @@ public void 
onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(numReduceListener.incrementAndGet(), reducePhase); } }; - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, @@ -1347,7 +1347,7 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t if (shouldFailPartial) { circuitBreaker.shouldBreak.set(true); } - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, circuitBreaker, () -> false, @@ -1419,7 +1419,7 @@ public void testFailConsumeAggs() throws Exception { request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo")).size(0)); request.setBatchedReduceSize(bufferSize); AtomicBoolean hasConsumedFailure = new AtomicBoolean(); - QueryPhaseResultConsumer consumer = searchPhaseController.newSearchPhaseResults( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( fixedExecutor, new NoopCircuitBreaker(CircuitBreaker.REQUEST), () -> false, From 7273ef69a6b67cc8adc12b44fdb6804ce843ebdf Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 4 Dec 2023 23:52:07 +0100 Subject: [PATCH 194/263] [Enterprise Search] Add update ingestion stats endpoint (#102926) Add update connector sync job ingestion stats endpoint. 
--- .../api/connector_sync_job.update_stats.json | 39 +++ .../460_connector_sync_job_update_stats.yml | 160 ++++++++++++ .../xpack/application/EnterpriseSearch.java | 12 +- .../connector/syncjob/ConnectorSyncJob.java | 10 +- .../syncjob/ConnectorSyncJobIndexService.java | 55 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 52 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 53 ++++ ...eConnectorSyncJobIngestionStatsAction.java | 242 ++++++++++++++++++ .../ConnectorSyncJobIndexServiceTests.java | 131 ++++++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 32 +++ ...ectorSyncJobIngestionStatsActionTests.java | 81 ++++++ ...StatsActionRequestBWCSerializingTests.java | 61 +++++ ...ncJobIngestionStatsActionRequestTests.java | 109 ++++++++ .../xpack/security/operator/Constants.java | 1 + 14 files changed, 1031 insertions(+), 7 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java 
create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json new file mode 100644 index 0000000000000..52f5a55cc8458 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json @@ -0,0 +1,39 @@ +{ + "connector_sync_job.update_stats": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the stats fields in the connector sync job document." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_stats", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to be updated." 
+ } + } + } + ] + }, + "body": { + "description": "The stats to update for the connector sync job.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml new file mode 100644 index 0000000000000..0e69866ce8b6c --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml @@ -0,0 +1,160 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Update the ingestion stats for a connector sync job - only mandatory parameters": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + + - match: { acknowledged: true } + + +--- +"Update the ingestion stats for a connector sync job - negative deleted document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: -10 + indexed_document_count: 20 + indexed_document_volume: 1000 + catch: bad_request + + +--- +"Update the ingestion stats for a connector sync job - negative indexed document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + 
connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: -20 + indexed_document_volume: 1000 + catch: bad_request + + +--- +"Update the ingestion stats for a connector sync job - negative indexed document volume error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: -1000 + catch: bad_request + +--- +"Update the ingestion stats for a connector sync job - negative optional total document count error": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + total_document_count: -10 + catch: bad_request + +--- +"Update the ingestion stats for a connector sync job - with optional total_document_count": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + total_document_count: 20 + + - match: { acknowledged: true } + + +--- +"Update the ingestion stats for a connector sync job - with optional last_seen": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.update_stats: + connector_sync_job_id: $id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + last_seen: 2023-12-04T08:45:50.567149Z + + - match: { acknowledged: true } + 
+--- +"Update the ingestion stats for a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.update_stats: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + body: + deleted_document_count: 10 + indexed_document_count: 20 + indexed_document_volume: 1000 + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 1a8ae73c41935..c4dbee214f37a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,11 +85,14 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import 
org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; import org.elasticsearch.xpack.application.rules.RuleQueryBuilder; @@ -223,7 +226,11 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(PostConnectorSyncJobAction.INSTANCE, TransportPostConnectorSyncJobAction.class), new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), - new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class) + new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>( + UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, + TransportUpdateConnectorSyncJobIngestionStatsAction.class + ) ) ); } @@ -292,7 +299,8 @@ public List getRestHandlers( new RestPostConnectorSyncJobAction(), new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), - new RestCheckInConnectorSyncJobAction() + new RestCheckInConnectorSyncJobAction(), + new RestUpdateConnectorSyncJobIngestionStatsAction() ) ); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 0c6caa3376c7b..0781bb515fe93 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -75,19 +75,19 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField CREATED_AT_FIELD = new ParseField("created_at"); - static final ParseField 
DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); + public static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); static final ParseField ERROR_FIELD = new ParseField("error"); public static final ParseField ID_FIELD = new ParseField("id"); - static final ParseField INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("indexed_document_count"); + public static final ParseField INDEXED_DOCUMENT_COUNT_FIELD = new ParseField("indexed_document_count"); - static final ParseField INDEXED_DOCUMENT_VOLUME_FIELD = new ParseField("indexed_document_volume"); + public static final ParseField INDEXED_DOCUMENT_VOLUME_FIELD = new ParseField("indexed_document_volume"); public static final ParseField JOB_TYPE_FIELD = new ParseField("job_type"); - static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); + public static final ParseField LAST_SEEN_FIELD = new ParseField("last_seen"); static final ParseField METADATA_FIELD = new ParseField("metadata"); @@ -95,7 +95,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField STATUS_FIELD = new ParseField("status"); - static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); + public static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); public static final ParseField TRIGGER_METHOD_FIELD = new ParseField("trigger_method"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 6a7aec2fc7430..f105e6ece72aa 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -36,9 +36,11 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.io.IOException; import java.time.Instant; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -249,6 +251,59 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener listener + ) { + String syncJobId = request.getConnectorSyncJobId(); + + Map fieldsToUpdate = new HashMap<>( + Map.of( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), + request.getDeletedDocumentCount(), + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), + request.getIndexedDocumentCount(), + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), + request.getIndexedDocumentVolume() + ) + ); + + if (Objects.nonNull(request.getTotalDocumentCount())) { + fieldsToUpdate.put(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), request.getTotalDocumentCount()); + } + // TODO: what to do, if no total document count is specified? Calculate it via the current count and params of the request? fetch + // the size of the target index? + + Instant lastSeen = Objects.nonNull(request.getLastSeen()) ? 
request.getLastSeen() : Instant.now(); + fieldsToUpdate.put(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, syncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ).doc(fieldsToUpdate); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(syncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(syncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + + } + private String generateId() { /* Workaround: only needed for generating an id upfront, autoGenerateId() has a side effect generating a timestamp, * which would raise an error on the response layer later ("autoGeneratedTimestamp should not be set externally"). diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..aedd1605b8bfb --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestUpdateConnectorSyncJobIngestionStatsAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_job_update_ingestion_stats"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_stats" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + UpdateConnectorSyncJobIngestionStatsAction.Request request = UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContentBytes( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM), + restRequest.content(), + restRequest.getXContentType() + ); + + return channel -> client.execute( + UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.OK) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..864da6ca3095b --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportUpdateConnectorSyncJobIngestionStatsAction extends HandledTransportAction< + UpdateConnectorSyncJobIngestionStatsAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportUpdateConnectorSyncJobIngestionStatsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorSyncJobIngestionStatsAction.NAME, + transportService, + actionFilters, + UpdateConnectorSyncJobIngestionStatsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorSyncJobIngestionStatsAction.Request 
request, + ActionListener listener + ) { + connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(request, listener.map(r -> AcknowledgedResponse.TRUE)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java new file mode 100644 index 0000000000000..34d8be2af4881 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import 
org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; + +import java.io.IOException; +import java.time.Instant; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; + +public class UpdateConnectorSyncJobIngestionStatsAction extends ActionType { + + public static final UpdateConnectorSyncJobIngestionStatsAction INSTANCE = new UpdateConnectorSyncJobIngestionStatsAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_stats"; + + public UpdateConnectorSyncJobIngestionStatsAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final ParseField CONNECTOR_SYNC_JOB_ID_FIELD = new ParseField("connector_sync_job_id"); + public static final String DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[deleted_document_count] cannot be negative."; + public static final String INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[indexed_document_count] cannot be negative."; + public static final String INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE = "[indexed_document_volume] cannot be negative."; + public static final String TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE = "[total_document_count] cannot be negative."; + + private final String connectorSyncJobId; + private final Long deletedDocumentCount; + private final Long indexedDocumentCount; + private final Long indexedDocumentVolume; + private final Long totalDocumentCount; + private final Instant lastSeen; + + public Request(StreamInput in) throws IOException { + 
super(in); + this.connectorSyncJobId = in.readString(); + this.deletedDocumentCount = in.readLong(); + this.indexedDocumentCount = in.readLong(); + this.indexedDocumentVolume = in.readLong(); + this.totalDocumentCount = in.readOptionalLong(); + this.lastSeen = in.readOptionalInstant(); + } + + public Request( + String connectorSyncJobId, + Long deletedDocumentCount, + Long indexedDocumentCount, + Long indexedDocumentVolume, + Long totalDocumentCount, + Instant lastSeen + ) { + this.connectorSyncJobId = connectorSyncJobId; + this.deletedDocumentCount = deletedDocumentCount; + this.indexedDocumentCount = indexedDocumentCount; + this.indexedDocumentVolume = indexedDocumentVolume; + this.totalDocumentCount = totalDocumentCount; + this.lastSeen = lastSeen; + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + public Long getDeletedDocumentCount() { + return deletedDocumentCount; + } + + public Long getIndexedDocumentCount() { + return indexedDocumentCount; + } + + public Long getIndexedDocumentVolume() { + return indexedDocumentVolume; + } + + public Long getTotalDocumentCount() { + return totalDocumentCount; + } + + public Instant getLastSeen() { + return lastSeen; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, validationException); + } + + if (deletedDocumentCount < 0L) { + validationException = addValidationError(DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (indexedDocumentCount < 0L) { + validationException = addValidationError(INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (indexedDocumentVolume < 0L) { + 
validationException = addValidationError(INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE, validationException); + } + + if (Objects.nonNull(totalDocumentCount) && totalDocumentCount < 0L) { + validationException = addValidationError(TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE, validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("connector_sync_job_update_ingestion_stats", false, (args, connectorSyncJobId) -> { + Long deletedDocumentCount = (Long) args[0]; + Long indexedDocumentCount = (Long) args[1]; + Long indexedDocumentVolume = (Long) args[2]; + + Long totalDocumentVolume = args[3] != null ? (Long) args[3] : null; + Instant lastSeen = args[4] != null ? (Instant) args[4] : null; + + return new Request( + connectorSyncJobId, + deletedDocumentCount, + indexedDocumentCount, + indexedDocumentVolume, + totalDocumentVolume, + lastSeen + ); + }); + + static { + PARSER.declareLong(constructorArg(), ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD); + PARSER.declareLong(constructorArg(), ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD); + PARSER.declareLong(optionalConstructorArg(), ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> Instant.parse(p.text()), + ConnectorSyncJob.LAST_SEEN_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request fromXContentBytes( + String connectorSyncJobId, + BytesReference source, + XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContent(parser, connectorSyncJobId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + 
source.utf8ToString()); + } + } + + public static Request fromXContent(XContentParser parser, String connectorSyncJobId) throws IOException { + return PARSER.parse(parser, connectorSyncJobId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName(), deletedDocumentCount); + builder.field(ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName(), indexedDocumentCount); + builder.field(ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName(), indexedDocumentVolume); + builder.field(ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName(), totalDocumentCount); + builder.field(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName(), lastSeen); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + out.writeLong(deletedDocumentCount); + out.writeLong(indexedDocumentCount); + out.writeLong(indexedDocumentVolume); + out.writeOptionalLong(totalDocumentCount); + out.writeOptionalInstant(lastSeen); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId) + && Objects.equals(deletedDocumentCount, request.deletedDocumentCount) + && Objects.equals(indexedDocumentCount, request.indexedDocumentCount) + && Objects.equals(indexedDocumentVolume, request.indexedDocumentVolume) + && Objects.equals(totalDocumentCount, request.totalDocumentCount) + && Objects.equals(lastSeen, request.lastSeen); + } + + @Override + public int hashCode() { + return Objects.hash( + connectorSyncJobId, + deletedDocumentCount, + indexedDocumentCount, + indexedDocumentVolume, + totalDocumentCount, + 
lastSeen + ); + } + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 8613078e3074e..2dcf43c6f3f22 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; import java.time.Instant; @@ -252,6 +253,136 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testUpdateConnectorSyncJobIngestionStats() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(syncJobId); + UpdateResponse updateResponse = awaitUpdateConnectorSyncJobIngestionStats(request); + Map syncJobSourceAfterUpdate = 
getConnectorSyncJobSourceById(syncJobId); + + Long requestDeletedDocumentCount = request.getDeletedDocumentCount(); + Long requestIndexedDocumentCount = request.getIndexedDocumentCount(); + Long requestIndexedDocumentVolume = request.getIndexedDocumentVolume(); + Long requestTotalDocumentCount = request.getTotalDocumentCount(); + Instant requestLastSeen = request.getLastSeen(); + + Long deletedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Long indexedDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Long indexedDocumentVolumeAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD.getPreferredName() + ); + Long totalDocumentCountAfterUpdate = (Long) syncJobSourceAfterUpdate.get( + ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD.getPreferredName() + ); + Instant lastSeenAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(deletedDocumentCountAfterUpdate, equalTo(requestDeletedDocumentCount)); + assertThat(indexedDocumentCountAfterUpdate, equalTo(requestIndexedDocumentCount)); + assertThat(indexedDocumentVolumeAfterUpdate, equalTo(requestIndexedDocumentVolume)); + assertThat(totalDocumentCountAfterUpdate, equalTo(requestTotalDocumentCount)); + assertThat(lastSeenAfterUpdate, equalTo(requestLastSeen)); + assertFieldsExceptAllIngestionStatsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testUpdateConnectorSyncJobIngestionStats_WithoutLastSeen_ExpectUpdateOfLastSeen() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + 
PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + Map syncJobSourceBeforeUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenBeforeUpdate = Instant.parse( + (String) syncJobSourceBeforeUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + syncJobId, + 10L, + 20L, + 100L, + 10L, + null + ); + + safeSleep(ONE_SECOND_IN_MILLIS); + + UpdateResponse updateResponse = awaitUpdateConnectorSyncJobIngestionStats(request); + Map syncJobSourceAfterUpdate = getConnectorSyncJobSourceById(syncJobId); + Instant lastSeenAfterUpdate = Instant.parse( + (String) syncJobSourceAfterUpdate.get(ConnectorSyncJob.LAST_SEEN_FIELD.getPreferredName()) + ); + long secondsBetweenLastSeenBeforeAndAfterUpdate = ChronoUnit.SECONDS.between(lastSeenBeforeUpdate, lastSeenAfterUpdate); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertTrue(lastSeenAfterUpdate.isAfter(lastSeenBeforeUpdate)); + assertThat(secondsBetweenLastSeenBeforeAndAfterUpdate, greaterThanOrEqualTo(1L)); + assertFieldsExceptAllIngestionStatsDidNotUpdate(syncJobSourceBeforeUpdate, syncJobSourceAfterUpdate); + } + + public void testUpdateConnectorSyncJobIngestionStats_WithMissingSyncJobId_ExpectException() { + expectThrows( + ResourceNotFoundException.class, + () -> awaitUpdateConnectorSyncJobIngestionStats( + new UpdateConnectorSyncJobIngestionStatsAction.Request(NON_EXISTING_SYNC_JOB_ID, 0L, 0L, 0L, 0L, Instant.now()) + ) + ); + } + + private UpdateResponse awaitUpdateConnectorSyncJobIngestionStats(UpdateConnectorSyncJobIngestionStatsAction.Request request) + throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + 
connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(request, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update request", resp.get()); + return resp.get(); + } + + private static void assertFieldsExceptAllIngestionStatsDidNotUpdate( + Map syncJobSourceBeforeUpdate, + Map syncJobSourceAfterUpdate + ) { + assertFieldsDidNotUpdateExceptFieldList( + syncJobSourceBeforeUpdate, + syncJobSourceAfterUpdate, + List.of( + ConnectorSyncJob.DELETED_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.INDEXED_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.INDEXED_DOCUMENT_VOLUME_FIELD, + ConnectorSyncJob.TOTAL_DOCUMENT_COUNT_FIELD, + ConnectorSyncJob.LAST_SEEN_FIELD + ) + ); + } + private static void assertFieldsExceptSyncStatusAndCancellationRequestedAtDidNotUpdate( Map syncJobSourceBeforeUpdate, Map syncJobSourceAfterUpdate diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 9ec404e109496..8170391094356 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; 
import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.time.Instant; @@ -24,6 +25,7 @@ import static org.elasticsearch.test.ESTestCase.randomInt; import static org.elasticsearch.test.ESTestCase.randomLong; import static org.elasticsearch.test.ESTestCase.randomMap; +import static org.elasticsearch.test.ESTestCase.randomNonNegativeLong; public class ConnectorSyncJobTestUtils { @@ -102,6 +104,36 @@ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyn return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest() { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstantBetween(lowerBoundInstant, upperBoundInstant) + ); + } + + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest( + String syncJobId + ) { + Instant lowerBoundInstant = Instant.ofEpochSecond(0L); + Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); + + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + syncJobId, + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomInstantBetween(lowerBoundInstant, upperBoundInstant) + ); + } + public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequest() { return new GetConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java new file mode 100644 index 0000000000000..625c2e6d96cda --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsActionTests.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportUpdateConnectorSyncJobIngestionStatsActionTests extends ESSingleNodeTestCase { + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private 
TransportUpdateConnectorSyncJobIngestionStatsAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportUpdateConnectorSyncJobIngestionStatsAction( + transportService, + clusterService, + mock(ActionFilters.class), + client() + ); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testUpdateConnectorSyncJobIngestionStatsAction_ExpectNoWarnings() throws InterruptedException { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(UpdateConnectorSyncJobIngestionStatsAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for update request", requestTimedOut); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..6e2178d8341cf --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSyncJobIngestionStatsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorSyncJobIngestionStatsAction.Request> { + + public String connectorSyncJobId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSyncJobIngestionStatsAction.Request::new; + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request createTestInstance() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + connectorSyncJobId = request.getConnectorSyncJobId(); + return request; + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request mutateInstance(UpdateConnectorSyncJobIngestionStatsAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request doParseInstance(XContentParser parser) throws IOException { + return 
UpdateConnectorSyncJobIngestionStatsAction.Request.fromXContent(parser, connectorSyncJobId); + } + + @Override + protected UpdateConnectorSyncJobIngestionStatsAction.Request mutateInstanceForVersion( + UpdateConnectorSyncJobIngestionStatsAction.Request instance, + TransportVersion version + ) { + return new UpdateConnectorSyncJobIngestionStatsAction.Request( + instance.getConnectorSyncJobId(), + instance.getDeletedDocumentCount(), + instance.getIndexedDocumentCount(), + instance.getIndexedDocumentVolume(), + instance.getTotalDocumentCount(), + instance.getLastSeen() + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java new file mode 100644 index 0000000000000..48ab14558db7e --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsActionRequestTests.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import java.time.Instant; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE; +import static org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction.Request.TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class UpdateConnectorSyncJobIngestionStatsActionRequestTests extends ESTestCase { + + public void testValidate_WhenRequestIsValid_ExpectNoValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = ConnectorSyncJobTestUtils + .getRandomUpdateConnectorSyncJobIngestionStatsActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + "", + 0L, + 0L, + 0L, + 0L, + Instant.now() + ); + 
ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + + public void testValidate_WhenDeletedDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + -10L, + 0L, + 0L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(DELETED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenIndexedDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + -10L, + 0L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(INDEXED_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenIndexedDocumentVolumeIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + 0L, + -10L, + 0L, + Instant.now() + ); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(INDEXED_DOCUMENT_VOLUME_NEGATIVE_ERROR_MESSAGE)); + } + + public void testValidate_WhenTotalDocumentCountIsNegative_ExpectValidationError() { + UpdateConnectorSyncJobIngestionStatsAction.Request request = new UpdateConnectorSyncJobIngestionStatsAction.Request( + randomAlphaOfLength(10), + 0L, + 0L, + 0L, + -10L, + Instant.now() + ); + 
ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(TOTAL_DOCUMENT_COUNT_NEGATIVE_ERROR_MESSAGE)); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 44399e84ee6e0..11e293d8675f7 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -139,6 +139,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/get", "cluster:admin/xpack/connector/sync_job/cancel", + "cluster:admin/xpack/connector/sync_job/update_stats", "cluster:admin/xpack/deprecation/info", "cluster:admin/xpack/deprecation/nodes/info", "cluster:admin/xpack/enrich/delete", From ad735e699ca4e8af23c5ebd4b921f530eadf9cb0 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 4 Dec 2023 16:15:50 -0800 Subject: [PATCH 195/263] Mute failing test --- .../resources/rest-api-spec/test/ml/3rd_party_deployment.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 2b9bc06bdd1d0..af3ecd2637843 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -218,7 +218,9 @@ setup: --- "Test start deployment fails while model download in progress": - + - skip: + 
version: "all" + reason: "Awaits fix: https://github.com/elastic/elasticsearch/issues/102948" - do: ml.put_trained_model: model_id: .elser_model_2 From 72afbc19566f6ff4c88dffb52c2479ddddf168fd Mon Sep 17 00:00:00 2001 From: Chenhui Wang <54903978+wangch079@users.noreply.github.com> Date: Tue, 5 Dec 2023 10:21:46 +0800 Subject: [PATCH 196/263] Add read pivilege on connectors indices for user kibana_system (#102770) --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 6a1da2e0ddfa0..f11f5c450b270 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -365,7 +365,9 @@ static RoleDescriptor kibanaSystem(String name) { // Kibana system user uses them to read / write slo data. RoleDescriptor.IndicesPrivileges.builder().indices(".slo-observability.*").privileges("all").build(), // Endpoint heartbeat. Kibana reads from these to determine metering/billing for endpoints. - RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read").build() }, + RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read").build(), + // For connectors telemetry. 
Will be removed once we switched to connectors API + RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() }, null, new ConfigurableClusterPrivilege[] { new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Set.of("kibana-*")), From 85311b20020ba424428c90a0aa457645287e8434 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Tue, 5 Dec 2023 07:03:59 +0100 Subject: [PATCH 197/263] [Profiling] Fix CO2 calculation with user-provided PUE and CO2PerKWH (#102884) Co-authored-by: Elastic Machine --- .../elasticsearch/xpack/profiling/CO2Calculator.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 136821d491c59..0d92bf0a78d09 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.profiling; -import java.util.Collections; import java.util.Map; import static java.util.Map.entry; @@ -19,7 +18,6 @@ final class CO2Calculator { private static final double DEFAULT_KILOWATTS_PER_CORE_ARM64 = 2.8d / 1000.0d; // unit: watt / core private static final double DEFAULT_KILOWATTS_PER_CORE = DEFAULT_KILOWATTS_PER_CORE_X86; // unit: watt / core private static final double DEFAULT_DATACENTER_PUE = 1.7d; - private static final Provider DEFAULT_PROVIDER = new Provider(DEFAULT_DATACENTER_PUE, Collections.emptyMap()); private final InstanceTypeService instanceTypeService; private final Map hostMetadata; private final double samplingDurationInSeconds; @@ -76,12 +74,13 @@ private double getKiloWattsPerCore(HostMetadata host) { } private double getCO2TonsPerKWH(HostMetadata host) { - Provider provider = 
PROVIDERS.getOrDefault(host.instanceType.provider, DEFAULT_PROVIDER); - return provider.co2TonsPerKWH.getOrDefault(host.instanceType.region, customCO2PerKWH); + Provider provider = PROVIDERS.get(host.instanceType.provider); + return provider == null ? customCO2PerKWH : provider.co2TonsPerKWH.getOrDefault(host.instanceType.region, customCO2PerKWH); } - private static double getDatacenterPUE(HostMetadata host) { - return PROVIDERS.getOrDefault(host.instanceType.provider, DEFAULT_PROVIDER).pue; + private double getDatacenterPUE(HostMetadata host) { + Provider provider = PROVIDERS.get(host.instanceType.provider); + return provider == null ? customDatacenterPUE : provider.pue; } private record Provider(double pue, Map co2TonsPerKWH) {} From be98a4697e2719dbcf9a542cac377ed37b9cdb6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Tue, 5 Dec 2023 07:53:49 +0100 Subject: [PATCH 198/263] [LTR] Update the feature name from "learn to rank" to "learning to rank". (#102938) --- .../cluster/stats/SearchUsageStatsTests.java | 6 +- .../test/cluster/FeatureFlag.java | 2 +- ...kConfig.java => LearningToRankConfig.java} | 72 ++++----- ...earningToRankFeatureExtractorBuilder.java} | 4 +- .../ltr/QueryExtractorBuilder.java | 2 +- .../ml/ltr/MlLTRNamedXContentProvider.java | 18 +-- ...ts.java => LearningToRankConfigTests.java} | 40 ++--- .../ml/qa/basic-multi-node/build.gradle | 2 +- ...T.java => MlLearningToRankRescorerIT.java} | 18 +-- .../ml/qa/ml-with-security/build.gradle | 4 +- ...rIT.java => LearningToRankRescorerIT.java} | 20 +-- .../xpack/ml/MachineLearning.java | 24 +-- .../loadingservice/ModelLoadingService.java | 2 +- ...corer.java => LearningToRankRescorer.java} | 12 +- ...ava => LearningToRankRescorerBuilder.java} | 146 ++++++++++-------- ...ava => LearningToRankRescorerContext.java} | 24 +-- ...ava => LearningToRankRescorerFeature.java} | 12 +- ...ervice.java => LearningToRankService.java} | 38 ++--- ...ingToRankRescorerBuilderRewriteTests.java} | 
93 +++++------ ...ankRescorerBuilderSerializationTests.java} | 63 ++++---- ...s.java => LearningToRankServiceTests.java} | 66 ++++---- .../xpack/test/rest/XPackRestIT.java | 2 +- ...orer.yml => learning_to_rank_rescorer.yml} | 14 +- 23 files changed, 351 insertions(+), 333 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{LearnToRankConfig.java => LearningToRankConfig.java} (71%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/{LearnToRankFeatureExtractorBuilder.java => LearningToRankFeatureExtractorBuilder.java} (88%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/{LearnToRankConfigTests.java => LearningToRankConfigTests.java} (84%) rename x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/{MlRescorerIT.java => MlLearningToRankRescorerIT.java} (97%) rename x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/{LearnToRankRescorerIT.java => LearningToRankRescorerIT.java} (94%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorer.java => LearningToRankRescorer.java} (94%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilder.java => LearningToRankRescorerBuilder.java} (57%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerContext.java => LearningToRankRescorerContext.java} (81%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerFeature.java => LearningToRankRescorerFeature.java} (57%) rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankService.java => LearningToRankService.java} (86%) rename 
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilderRewriteTests.java => LearningToRankRescorerBuilderRewriteTests.java} (66%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankRescorerBuilderSerializationTests.java => LearningToRankRescorerBuilderSerializationTests.java} (71%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/{LearnToRankServiceTests.java => LearningToRankServiceTests.java} (77%) rename x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/{learn_to_rank_rescorer.yml => learning_to_rank_rescorer.yml} (94%) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java index cc4509500f9c1..a5704748ea242 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/SearchUsageStatsTests.java @@ -34,7 +34,7 @@ public class SearchUsageStatsTests extends AbstractWireSerializingTestCase RESCORER_TYPES = List.of("query", "learn_to_rank"); + private static final List RESCORER_TYPES = List.of("query", "learning_to_rank"); private static final List SECTIONS = List.of( "highlight", @@ -136,14 +136,14 @@ public void testAdd() { searchUsageStats.add( new SearchUsageStats( Map.of("term", 1L, "match", 1L), - Map.of("query", 5L, "learn_to_rank", 2L), + Map.of("query", 5L, "learning_to_rank", 2L), Map.of("query", 10L, "knn", 1L), 10L ) ); assertEquals(Map.of("match", 11L, "term", 1L), searchUsageStats.getQueryUsage()); assertEquals(Map.of("query", 20L, "knn", 1L), searchUsageStats.getSectionsUsage()); - assertEquals(Map.of("query", 10L, "learn_to_rank", 2L), searchUsageStats.getRescorerUsage()); + assertEquals(Map.of("query", 10L, "learning_to_rank", 2L), 
searchUsageStats.getRescorerUsage()); assertEquals(20L, searchUsageStats.getTotalSearchCount()); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index ff7195f9f5f37..2c313da69b42e 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - LEARN_TO_RANK("es.learn_to_rank_feature_flag_enabled=true", Version.fromString("8.10.0"), null), + LEARNING_TO_RANK("es.learning_to_rank_feature_flag_enabled=true", Version.fromString("8.12.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java similarity index 71% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java index ba617ca8d04b8..7a51fb9a0fce3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfig.java @@ -17,7 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; -import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper; @@ -30,29 +30,29 @@ import java.util.Set; import java.util.stream.Collectors; -public class LearnToRankConfig extends RegressionConfig implements Rewriteable { +public class LearningToRankConfig extends RegressionConfig implements Rewriteable { - public static final ParseField NAME = new ParseField("learn_to_rank"); + public static final ParseField NAME = new ParseField("learning_to_rank"); static final TransportVersion MIN_SUPPORTED_TRANSPORT_VERSION = TransportVersion.current(); public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); public static final ParseField FEATURE_EXTRACTORS = new ParseField("feature_extractors"); public static final ParseField DEFAULT_PARAMS = new ParseField("default_params"); - public static LearnToRankConfig EMPTY_PARAMS = new LearnToRankConfig(null, null, null); + public static LearningToRankConfig EMPTY_PARAMS = new LearningToRankConfig(null, null, null); - private static final ObjectParser LENIENT_PARSER = createParser(true); - private static final ObjectParser STRICT_PARSER = createParser(false); + private static final ObjectParser LENIENT_PARSER = createParser(true); + private static final ObjectParser STRICT_PARSER = createParser(false); - private static ObjectParser createParser(boolean lenient) { - ObjectParser parser = new ObjectParser<>( + private static ObjectParser createParser(boolean lenient) { + ObjectParser parser = new ObjectParser<>( NAME.getPreferredName(), lenient, - LearnToRankConfig.Builder::new + LearningToRankConfig.Builder::new ); parser.declareInt(Builder::setNumTopFeatureImportanceValues, 
NUM_TOP_FEATURE_IMPORTANCE_VALUES); parser.declareNamedObjects( - Builder::setLearnToRankFeatureExtractorBuilders, - (p, c, n) -> p.namedObject(LearnToRankFeatureExtractorBuilder.class, n, lenient), + Builder::setLearningToRankFeatureExtractorBuilders, + (p, c, n) -> p.namedObject(LearningToRankFeatureExtractorBuilder.class, n, lenient), b -> {}, FEATURE_EXTRACTORS ); @@ -60,30 +60,30 @@ private static ObjectParser createParser(boo return parser; } - public static LearnToRankConfig fromXContentStrict(XContentParser parser) { + public static LearningToRankConfig fromXContentStrict(XContentParser parser) { return STRICT_PARSER.apply(parser, null).build(); } - public static LearnToRankConfig fromXContentLenient(XContentParser parser) { + public static LearningToRankConfig fromXContentLenient(XContentParser parser) { return LENIENT_PARSER.apply(parser, null).build(); } - public static Builder builder(LearnToRankConfig config) { + public static Builder builder(LearningToRankConfig config) { return new Builder(config); } - private final List featureExtractorBuilders; + private final List featureExtractorBuilders; private final Map paramsDefaults; - public LearnToRankConfig( + public LearningToRankConfig( Integer numTopFeatureImportanceValues, - List featureExtractorBuilders, + List featureExtractorBuilders, Map paramsDefaults ) { super(DEFAULT_RESULTS_FIELD, numTopFeatureImportanceValues); if (featureExtractorBuilders != null) { Set featureNames = featureExtractorBuilders.stream() - .map(LearnToRankFeatureExtractorBuilder::featureName) + .map(LearningToRankFeatureExtractorBuilder::featureName) .collect(Collectors.toSet()); if (featureNames.size() < featureExtractorBuilders.size()) { throw new IllegalArgumentException( @@ -95,19 +95,19 @@ public LearnToRankConfig( this.paramsDefaults = Collections.unmodifiableMap(Objects.requireNonNullElse(paramsDefaults, Map.of())); } - public LearnToRankConfig(StreamInput in) throws IOException { + public 
LearningToRankConfig(StreamInput in) throws IOException { super(in); - this.featureExtractorBuilders = in.readNamedWriteableCollectionAsList(LearnToRankFeatureExtractorBuilder.class); + this.featureExtractorBuilders = in.readNamedWriteableCollectionAsList(LearningToRankFeatureExtractorBuilder.class); this.paramsDefaults = in.readMap(); } - public List getFeatureExtractorBuilders() { + public List getFeatureExtractorBuilders() { return featureExtractorBuilders; } public List getQueryFeatureExtractorBuilders() { List queryExtractorBuilders = new ArrayList<>(); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : featureExtractorBuilders) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { queryExtractorBuilders.add(queryExtractorBuilder); } @@ -189,7 +189,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - LearnToRankConfig that = (LearnToRankConfig) o; + LearningToRankConfig that = (LearningToRankConfig) o; return Objects.equals(featureExtractorBuilders, that.featureExtractorBuilders) && Objects.equals(paramsDefaults, that.paramsDefaults); } @@ -220,33 +220,33 @@ public TransportVersion getMinimalSupportedTransportVersion() { } @Override - public LearnToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { + public LearningToRankConfig rewrite(QueryRewriteContext ctx) throws IOException { if (this.featureExtractorBuilders.isEmpty()) { return this; } boolean rewritten = false; - List rewrittenExtractors = new ArrayList<>(this.featureExtractorBuilders.size()); - for (LearnToRankFeatureExtractorBuilder extractorBuilder : this.featureExtractorBuilders) { - LearnToRankFeatureExtractorBuilder rewrittenExtractor = Rewriteable.rewrite(extractorBuilder, ctx); + List rewrittenExtractors = new 
ArrayList<>(this.featureExtractorBuilders.size()); + for (LearningToRankFeatureExtractorBuilder extractorBuilder : this.featureExtractorBuilders) { + LearningToRankFeatureExtractorBuilder rewrittenExtractor = Rewriteable.rewrite(extractorBuilder, ctx); rewrittenExtractors.add(rewrittenExtractor); rewritten |= (rewrittenExtractor != extractorBuilder); } if (rewritten) { - return new LearnToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); + return new LearningToRankConfig(getNumTopFeatureImportanceValues(), rewrittenExtractors, paramsDefaults); } return this; } public static class Builder { private Integer numTopFeatureImportanceValues; - private List learnToRankFeatureExtractorBuilders; + private List learningToRankFeatureExtractorBuilders; private Map paramsDefaults = Map.of(); Builder() {} - Builder(LearnToRankConfig config) { + Builder(LearningToRankConfig config) { this.numTopFeatureImportanceValues = config.getNumTopFeatureImportanceValues(); - this.learnToRankFeatureExtractorBuilders = config.featureExtractorBuilders; + this.learningToRankFeatureExtractorBuilders = config.featureExtractorBuilders; this.paramsDefaults = config.getParamsDefaults(); } @@ -255,10 +255,10 @@ public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceV return this; } - public Builder setLearnToRankFeatureExtractorBuilders( - List learnToRankFeatureExtractorBuilders + public Builder setLearningToRankFeatureExtractorBuilders( + List learningToRankFeatureExtractorBuilders ) { - this.learnToRankFeatureExtractorBuilders = learnToRankFeatureExtractorBuilders; + this.learningToRankFeatureExtractorBuilders = learningToRankFeatureExtractorBuilders; return this; } @@ -267,8 +267,8 @@ public Builder setParamsDefaults(Map paramsDefaults) { return this; } - public LearnToRankConfig build() { - return new LearnToRankConfig(numTopFeatureImportanceValues, learnToRankFeatureExtractorBuilders, paramsDefaults); + public LearningToRankConfig 
build() { + return new LearningToRankConfig(numTopFeatureImportanceValues, learningToRankFeatureExtractorBuilders, paramsDefaults); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java similarity index 88% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java index 3eac7d0d0a245..a610756cda22e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearnToRankFeatureExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/LearningToRankFeatureExtractorBuilder.java @@ -12,11 +12,11 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ml.utils.NamedXContentObject; -public interface LearnToRankFeatureExtractorBuilder +public interface LearningToRankFeatureExtractorBuilder extends NamedXContentObject, NamedWriteable, - Rewriteable { + Rewriteable { ParseField FEATURE_NAME = new ParseField("feature_name"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java index a138fbbb98ba1..d9e90b92382e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ltr/QueryExtractorBuilder.java @@ -27,7 +27,7 @@ public 
record QueryExtractorBuilder(String featureName, QueryProvider query, float defaultScore) implements - LearnToRankFeatureExtractorBuilder { + LearningToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("query_extractor"); public static final ParseField FEATURE_NAME = new ParseField("feature_name"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java index c7a8db0ebf011..0f59d1183a632 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/ltr/MlLTRNamedXContentProvider.java @@ -10,10 +10,10 @@ import org.elasticsearch.plugins.spi.NamedXContentProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedInferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.StrictlyParsedInferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import java.util.ArrayList; @@ -32,22 +32,22 @@ public List getNamedXContentParsers() { namedXContent.add( new NamedXContentRegistry.Entry( LenientlyParsedInferenceConfig.class, - LearnToRankConfig.NAME, - LearnToRankConfig::fromXContentLenient + LearningToRankConfig.NAME, + LearningToRankConfig::fromXContentLenient ) ); // 
Strict Inference Config namedXContent.add( new NamedXContentRegistry.Entry( StrictlyParsedInferenceConfig.class, - LearnToRankConfig.NAME, - LearnToRankConfig::fromXContentStrict + LearningToRankConfig.NAME, + LearningToRankConfig::fromXContentStrict ) ); // LTR extractors namedXContent.add( new NamedXContentRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, QueryExtractorBuilder.NAME, QueryExtractorBuilder::fromXContent ) @@ -59,12 +59,12 @@ public List getNamedWriteables() { List namedWriteables = new ArrayList<>(); // Inference config namedWriteables.add( - new NamedWriteableRegistry.Entry(InferenceConfig.class, LearnToRankConfig.NAME.getPreferredName(), LearnToRankConfig::new) + new NamedWriteableRegistry.Entry(InferenceConfig.class, LearningToRankConfig.NAME.getPreferredName(), LearningToRankConfig::new) ); // LTR Extractors namedWriteables.add( new NamedWriteableRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, QueryExtractorBuilder.NAME.getPreferredName(), QueryExtractorBuilder::new ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java similarity index 84% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java index 1059af21ab7eb..09d2366984383 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearnToRankConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/LearningToRankConfigTests.java @@ -22,7 +22,7 @@ import 
org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.InferenceConfigItemTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilderTests; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; import org.junit.Before; @@ -37,11 +37,11 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class LearnToRankConfigTests extends InferenceConfigItemTestCase { +public class LearningToRankConfigTests extends InferenceConfigItemTestCase { private boolean lenient; - public static LearnToRankConfig randomLearnToRankConfig() { - return new LearnToRankConfig( + public static LearningToRankConfig randomLearningToRankConfig() { + return new LearningToRankConfig( randomBoolean() ? null : randomIntBetween(0, 10), randomBoolean() ? 
null @@ -56,15 +56,15 @@ public void chooseStrictOrLenient() { } @Override - protected LearnToRankConfig createTestInstance() { - return randomLearnToRankConfig(); + protected LearningToRankConfig createTestInstance() { + return randomLearningToRankConfig(); } @Override - protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { + protected LearningToRankConfig mutateInstance(LearningToRankConfig instance) { int i = randomInt(2); - LearnToRankConfig.Builder builder = LearnToRankConfig.builder(instance); + LearningToRankConfig.Builder builder = LearningToRankConfig.builder(instance); switch (i) { case 0 -> { @@ -76,7 +76,7 @@ protected LearnToRankConfig mutateInstance(LearnToRankConfig instance) { ); } case 1 -> { - builder.setLearnToRankFeatureExtractorBuilders( + builder.setLearningToRankFeatureExtractorBuilders( randomValueOtherThan( instance.getFeatureExtractorBuilders(), () -> randomBoolean() || instance.getFeatureExtractorBuilders().isEmpty() @@ -109,13 +109,13 @@ protected Predicate getRandomFieldsExcludeFilter() { } @Override - protected Writeable.Reader instanceReader() { - return LearnToRankConfig::new; + protected Writeable.Reader instanceReader() { + return LearningToRankConfig::new; } @Override - protected LearnToRankConfig doParseInstance(XContentParser parser) throws IOException { - return lenient ? LearnToRankConfig.fromXContentLenient(parser) : LearnToRankConfig.fromXContentStrict(parser); + protected LearningToRankConfig doParseInstance(XContentParser parser) throws IOException { + return lenient ? 
LearningToRankConfig.fromXContentLenient(parser) : LearningToRankConfig.fromXContentStrict(parser); } @Override @@ -124,18 +124,18 @@ protected boolean supportsUnknownFields() { } @Override - protected LearnToRankConfig mutateInstanceForVersion(LearnToRankConfig instance, TransportVersion version) { + protected LearningToRankConfig mutateInstanceForVersion(LearningToRankConfig instance, TransportVersion version) { return instance; } public void testDuplicateFeatureNames() { - List featureExtractorBuilderList = List.of( + List featureExtractorBuilderList = List.of( new TestValueExtractor("foo"), new TestValueExtractor("foo") ); - LearnToRankConfig.Builder builder = LearnToRankConfig.builder(randomLearnToRankConfig()) - .setLearnToRankFeatureExtractorBuilders(featureExtractorBuilderList); + LearningToRankConfig.Builder builder = LearningToRankConfig.builder(randomLearningToRankConfig()) + .setLearningToRankFeatureExtractorBuilders(featureExtractorBuilderList); expectThrows(IllegalArgumentException.class, () -> builder.build()); } @@ -148,7 +148,7 @@ protected NamedXContentRegistry xContentRegistry() { namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); namedXContent.add( new NamedXContentRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, TestValueExtractor.NAME, TestValueExtractor::fromXContent ) @@ -163,7 +163,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); namedWriteables.add( new NamedWriteableRegistry.Entry( - LearnToRankFeatureExtractorBuilder.class, + LearningToRankFeatureExtractorBuilder.class, TestValueExtractor.NAME.getPreferredName(), TestValueExtractor::new ) @@ -171,7 +171,7 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry(namedWriteables); } - private static class TestValueExtractor implements 
LearnToRankFeatureExtractorBuilder { + private static class TestValueExtractor implements LearningToRankFeatureExtractorBuilder { public static final ParseField NAME = new ParseField("test"); private final String featureName; diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index bf6ab9ed7d77e..3f2f85e3e09da 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -17,7 +17,7 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'slm.history_index_enabled', 'false' - requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java similarity index 97% rename from x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java rename to x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java index 6dbb0a46121aa..0dab4f9e4256c 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlRescorerIT.java +++ b/x-pack/plugin/ml/qa/basic-multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlLearningToRankRescorerIT.java @@ -21,7 +21,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -public class MlRescorerIT extends ESRestTestCase { +public class MlLearningToRankRescorerIT extends ESRestTestCase { private static 
final String MODEL_ID = "basic-ltr-model"; private static final String INDEX_NAME = "store"; @@ -33,7 +33,7 @@ public void setupModelAndData() throws IOException { "description": "super complex model for tests", "input": { "field_names": ["cost", "product"] }, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { "feature_extractors": [ { "query_extractor": { @@ -206,7 +206,7 @@ public void testLtrSimple() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -225,7 +225,7 @@ public void testLtrSimpleDFS() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model", "params": { "keyword": "TV" } } @@ -239,7 +239,7 @@ public void testLtrSimpleDFS() throws Exception { { "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model", "params": { "keyword": "TV" } } @@ -263,7 +263,7 @@ public void testLtrSimpleEmpty() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -282,7 +282,7 @@ public void testLtrEmptyDFS() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -301,7 +301,7 @@ public void testLtrCanMatch() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } @@ -317,7 +317,7 @@ public void testLtrCanMatch() throws Exception { }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "basic-ltr-model" } } diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index b8b706353d624..df2eb2c687fb5 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -181,7 
+181,7 @@ tasks.named("yamlRestTest").configure { 'ml/inference_crud/Test put nlp model config with vocabulary set', 'ml/inference_crud/Test put model model aliases with nlp model', 'ml/inference_processor/Test create processor with missing mandatory fields', - 'ml/learn_to_rank_rescorer/Test rescore with missing model', + 'ml/learning_to_rank_rescorer/Test rescore with missing model', 'ml/inference_stats_crud/Test get stats given missing trained model', 'ml/inference_stats_crud/Test get stats given expression without matches and allow_no_match is false', 'ml/jobs_crud/Test cannot create job with model snapshot id set', @@ -258,5 +258,5 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' - requiresFeature 'es.learn_to_rank_feature_flag_enabled', Version.fromString("8.10.0") + requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java similarity index 94% rename from x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java rename to x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java index d246f070f0b8d..0e060b3c94644 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearnToRankRescorerIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/LearningToRankRescorerIT.java @@ -18,7 +18,7 @@ import static org.hamcrest.Matchers.equalTo; -public class 
LearnToRankRescorerIT extends InferenceTestCase { +public class LearningToRankRescorerIT extends InferenceTestCase { private static final String MODEL_ID = "ltr-model"; private static final String INDEX_NAME = "store"; @@ -30,7 +30,7 @@ public void setupModelAndData() throws IOException { "description": "super complex model for tests", "input": {"field_names": ["cost", "product"]}, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { "feature_extractors": [ { "query_extractor": { @@ -196,13 +196,13 @@ public void setupModelAndData() throws IOException { adminClient().performRequest(new Request("POST", INDEX_NAME + "/_refresh")); } - public void testLearnToRankRescore() throws Exception { + public void testLearningToRankRescore() throws Exception { Request request = new Request("GET", "store/_search?size=3&error_trace"); request.setJsonEntity(""" { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } }"""); assertHitScores(client().performRequest(request), List.of(20.0, 20.0, 17.0)); @@ -211,7 +211,7 @@ public void testLearnToRankRescore() throws Exception { "query": { "term": { "product": "Laptop" } }, "rescore": { "window_size": 10, - "learn_to_rank": { + "learning_to_rank": { "model_id": "ltr-model", "params": { "keyword": "Laptop" @@ -225,25 +225,25 @@ public void testLearnToRankRescore() throws Exception { "query": {"term": { "product": "Laptop" } }, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model"} + "learning_to_rank": { "model_id": "ltr-model"} } }"""); assertHitScores(client().performRequest(request), List.of(9.0, 9.0, 6.0)); } - public void testLearnToRankRescoreSmallWindow() throws Exception { + public void testLearningToRankRescoreSmallWindow() throws Exception { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" { "rescore": { "window_size": 2, - "learn_to_rank": { "model_id": "ltr-model" } + 
"learning_to_rank": { "model_id": "ltr-model" } } }"""); assertHitScores(client().performRequest(request), List.of(20.0, 20.0, 1.0, 1.0, 1.0)); } - public void testLearnToRankRescorerWithChainedRescorers() throws IOException { + public void testLearningToRankRescorerWithChainedRescorers() throws IOException { Request request = new Request("GET", "store/_search?size=5"); request.setJsonEntity(""" { @@ -254,7 +254,7 @@ public void testLearnToRankRescorerWithChainedRescorers() throws IOException { }, { "window_size": 3, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } }, { "window_size": 2, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index d0f7302105768..749a31de51b07 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -324,9 +324,9 @@ import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerBuilder; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankRescorerFeature; -import org.elasticsearch.xpack.ml.inference.ltr.LearnToRankService; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerBuilder; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerFeature; +import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankService; import org.elasticsearch.xpack.ml.inference.modelsize.MlModelSizeNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.inference.pytorch.process.BlackHolePyTorchProcess; @@ -762,7 +762,7 @@ 
public void loadExtensions(ExtensionLoader loader) { private final SetOnce mlLifeCycleService = new SetOnce<>(); private final SetOnce inferenceModelBreaker = new SetOnce<>(); private final SetOnce modelLoadingService = new SetOnce<>(); - private final SetOnce learnToRankService = new SetOnce<>(); + private final SetOnce learningToRankService = new SetOnce<>(); private final SetOnce mlAutoscalingDeciderService = new SetOnce<>(); private final SetOnce deploymentManager = new SetOnce<>(); private final SetOnce trainedModelAllocationClusterServiceSetOnce = new SetOnce<>(); @@ -886,12 +886,12 @@ private static void reportClashingNodeAttribute(String attrName) { @Override public List> getRescorers() { - if (enabled && LearnToRankRescorerFeature.isEnabled()) { + if (enabled && LearningToRankRescorerFeature.isEnabled()) { return List.of( new RescorerSpec<>( - LearnToRankRescorerBuilder.NAME, - in -> new LearnToRankRescorerBuilder(in, learnToRankService.get()), - parser -> LearnToRankRescorerBuilder.fromXContent(parser, learnToRankService.get()) + LearningToRankRescorerBuilder.NAME, + in -> new LearningToRankRescorerBuilder(in, learningToRankService.get()), + parser -> LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService.get()) ) ); } @@ -1120,8 +1120,8 @@ public Collection createComponents(PluginServices services) { ); this.modelLoadingService.set(modelLoadingService); - this.learnToRankService.set( - new LearnToRankService(modelLoadingService, trainedModelProvider, services.scriptService(), services.xContentRegistry()) + this.learningToRankService.set( + new LearningToRankService(modelLoadingService, trainedModelProvider, services.scriptService(), services.xContentRegistry()) ); this.deploymentManager.set( @@ -1797,7 +1797,7 @@ public List getNamedXContent() { ); namedXContent.addAll(new CorrelationNamedContentProvider().getNamedXContentParsers()); // LTR Combine with Inference named content provider when feature flag is removed - if 
(LearnToRankRescorerFeature.isEnabled()) { + if (LearningToRankRescorerFeature.isEnabled()) { namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); } return namedXContent; @@ -1885,7 +1885,7 @@ public List getNamedWriteables() { namedWriteables.addAll(new CorrelationNamedContentProvider().getNamedWriteables()); namedWriteables.addAll(new ChangePointNamedContentProvider().getNamedWriteables()); // LTR Combine with Inference named content provider when feature flag is removed - if (LearnToRankRescorerFeature.isEnabled()) { + if (LearningToRankRescorerFeature.isEnabled()) { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); } return namedWriteables; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index a82beaf936573..e9b7a1a3e137b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -298,7 +298,7 @@ public void getModelForAggregation(String modelId, ActionListener mo * @param modelId the model to get * @param modelActionListener the listener to alert when the model has been retrieved */ - public void getModelForLearnToRank(String modelId, ActionListener modelActionListener) { + public void getModelForLearningToRank(String modelId, ActionListener modelActionListener) { getModel(modelId, Consumer.SEARCH_RESCORER, null, modelActionListener); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java similarity index 94% rename from 
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index dd1df7d8090d6..068462bcdfca2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -32,17 +32,17 @@ import static java.util.stream.Collectors.toUnmodifiableSet; -public class LearnToRankRescorer implements Rescorer { +public class LearningToRankRescorer implements Rescorer { - public static final LearnToRankRescorer INSTANCE = new LearnToRankRescorer(); - private static final Logger logger = LogManager.getLogger(LearnToRankRescorer.class); + public static final LearningToRankRescorer INSTANCE = new LearningToRankRescorer(); + private static final Logger logger = LogManager.getLogger(LearningToRankRescorer.class); private static final Comparator SCORE_DOC_COMPARATOR = (o1, o2) -> { int cmp = Float.compare(o2.score, o1.score); return cmp == 0 ? 
Integer.compare(o1.doc, o2.doc) : cmp; }; - private LearnToRankRescorer() { + private LearningToRankRescorer() { } @@ -51,7 +51,7 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r if (topDocs.scoreDocs.length == 0) { return topDocs; } - LearnToRankRescorerContext ltrRescoreContext = (LearnToRankRescorerContext) rescoreContext; + LearningToRankRescorerContext ltrRescoreContext = (LearningToRankRescorerContext) rescoreContext; if (ltrRescoreContext.regressionModelDefinition == null) { throw new IllegalStateException("local model reference is null, missing rewriteAndFetch before rescore phase?"); } @@ -104,7 +104,7 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r for (int i = 0; i < hitsToRescore.length; i++) { Map features = docFeatures.get(i); try { - InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learnToRankConfig); + InferenceResults results = definition.inferLtr(features, ltrRescoreContext.learningToRankConfig); if (results instanceof WarningInferenceResults warningInferenceResults) { logger.warn("Failure rescoring doc, warning returned [" + warningInferenceResults.getWarning() + "]"); } else if (results.predictedValue() instanceof Number prediction) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java similarity index 57% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java index 49a082c9da6df..038f3fb08adbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilder.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java @@ -22,7 +22,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import java.io.IOException; @@ -30,9 +30,9 @@ import java.util.Objects; import java.util.function.Function; -public class LearnToRankRescorerBuilder extends RescorerBuilder { +public class LearningToRankRescorerBuilder extends RescorerBuilder { - public static final String NAME = "learn_to_rank"; + public static final String NAME = "learning_to_rank"; private static final ParseField MODEL_FIELD = new ParseField("model_id"); private static final ParseField PARAMS_FIELD = new ParseField("params"); private static final ObjectParser PARSER = new ObjectParser<>(NAME, false, Builder::new); @@ -42,56 +42,56 @@ public class LearnToRankRescorerBuilder extends RescorerBuilder p.map(), PARAMS_FIELD); } - public static LearnToRankRescorerBuilder fromXContent(XContentParser parser, LearnToRankService learnToRankService) { - return PARSER.apply(parser, null).build(learnToRankService); + public static LearningToRankRescorerBuilder fromXContent(XContentParser parser, LearningToRankService learningToRankService) { + return PARSER.apply(parser, null).build(learningToRankService); } private final String modelId; private final Map params; - private final LearnToRankService learnToRankService; + private final LearningToRankService learningToRankService; private final LocalModel localModel; - private final LearnToRankConfig learnToRankConfig; + private final LearningToRankConfig learningToRankConfig; private boolean rescoreOccurred = 
false; - LearnToRankRescorerBuilder(String modelId, Map params, LearnToRankService learnToRankService) { - this(modelId, null, params, learnToRankService); + LearningToRankRescorerBuilder(String modelId, Map params, LearningToRankService learningToRankService) { + this(modelId, null, params, learningToRankService); } - LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder( String modelId, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, Map params, - LearnToRankService learnToRankService + LearningToRankService learningToRankService ) { this.modelId = modelId; this.params = params; - this.learnToRankConfig = learnToRankConfig; - this.learnToRankService = learnToRankService; + this.learningToRankConfig = learningToRankConfig; + this.learningToRankService = learningToRankService; // Local inference model is not loaded yet. Will be done in a later rewrite. this.localModel = null; } - LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder( LocalModel localModel, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, Map params, - LearnToRankService learnToRankService + LearningToRankService learningToRankService ) { this.modelId = localModel.getModelId(); this.params = params; - this.learnToRankConfig = learnToRankConfig; + this.learningToRankConfig = learningToRankConfig; this.localModel = localModel; - this.learnToRankService = learnToRankService; + this.learningToRankService = learningToRankService; } - public LearnToRankRescorerBuilder(StreamInput input, LearnToRankService learnToRankService) throws IOException { + public LearningToRankRescorerBuilder(StreamInput input, LearningToRankService learningToRankService) throws IOException { super(input); this.modelId = input.readString(); this.params = input.readMap(); - this.learnToRankConfig = (LearnToRankConfig) input.readOptionalNamedWriteable(InferenceConfig.class); - this.learnToRankService = learnToRankService; + this.learningToRankConfig = 
(LearningToRankConfig) input.readOptionalNamedWriteable(InferenceConfig.class); + this.learningToRankService = learningToRankService; this.localModel = null; } @@ -104,12 +104,12 @@ public Map params() { return params; } - public LearnToRankConfig learnToRankConfig() { - return learnToRankConfig; + public LearningToRankConfig learningToRankConfig() { + return learningToRankConfig; } - public LearnToRankService learnToRankService() { - return learnToRankService; + public LearningToRankService learningToRankService() { + return learningToRankService; } public LocalModel localModel() { @@ -117,7 +117,7 @@ public LocalModel localModel() { } @Override - public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { + public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { if (ctx.convertToDataRewriteContext() != null) { return doDataNodeRewrite(ctx); } @@ -133,41 +133,46 @@ public RescorerBuilder rewrite(QueryRewriteContext c * This can and be done on the coordinator as it not only validates if the stored model is of the appropriate type, it allows * any stored logic to rewrite on the coordinator level if possible. 
* @param ctx QueryRewriteContext - * @return rewritten LearnToRankRescorerBuilder or self if no changes + * @return rewritten LearningToRankRescorerBuilder or self if no changes * @throws IOException when rewrite fails */ - private RescorerBuilder doCoordinatorNodeRewrite(QueryRewriteContext ctx) throws IOException { + private RescorerBuilder doCoordinatorNodeRewrite(QueryRewriteContext ctx) throws IOException { // We have requested for the stored config and fetch is completed, get the config and rewrite further if required - if (learnToRankConfig != null) { - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); - if (rewrittenConfig == learnToRankConfig) { + if (learningToRankConfig != null) { + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); + if (rewrittenConfig == learningToRankConfig) { return this; } - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder(modelId, rewrittenConfig, params, learnToRankService); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( + modelId, + rewrittenConfig, + params, + learningToRankService + ); if (windowSize != null) { builder.windowSize(windowSize); } return builder; } - if (learnToRankService == null) { - throw new IllegalStateException("Learn to rank service must be available"); + if (learningToRankService == null) { + throw new IllegalStateException("Learning to rank service must be available"); } - SetOnce configSetOnce = new SetOnce<>(); + SetOnce configSetOnce = new SetOnce<>(); GetTrainedModelsAction.Request request = new GetTrainedModelsAction.Request(modelId); request.setAllowNoResources(false); ctx.registerAsyncAction( - (c, l) -> learnToRankService.loadLearnToRankConfig(modelId, params, ActionListener.wrap(learnToRankConfig -> { - configSetOnce.set(learnToRankConfig); + (c, l) -> learningToRankService.loadLearningToRankConfig(modelId, params, ActionListener.wrap(learningToRankConfig -> { + 
configSetOnce.set(learningToRankConfig); l.onResponse(null); }, l::onFailure)) ); - LearnToRankRescorerBuilder builder = new RewritingLearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new RewritingLearningToRankRescorerBuilder( (rewritingBuilder) -> configSetOnce.get() == null ? rewritingBuilder - : new LearnToRankRescorerBuilder(modelId, configSetOnce.get(), params, learnToRankService) + : new LearningToRankRescorerBuilder(modelId, configSetOnce.get(), params, learningToRankService) ); if (windowSize() != null) { @@ -181,28 +186,28 @@ private RescorerBuilder doCoordinatorNodeRewrite(Que * @param ctx Rewrite context * @return A rewritten rescorer with a model definition or a model definition supplier populated */ - private RescorerBuilder doDataNodeRewrite(QueryRewriteContext ctx) throws IOException { - assert learnToRankConfig != null; + private RescorerBuilder doDataNodeRewrite(QueryRewriteContext ctx) throws IOException { + assert learningToRankConfig != null; // The model is already loaded, no need to rewrite further. 
if (localModel != null) { return this; } - if (learnToRankService == null) { - throw new IllegalStateException("Learn to rank service must be available"); + if (learningToRankService == null) { + throw new IllegalStateException("Learning to rank service must be available"); } - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); SetOnce localModelSetOnce = new SetOnce<>(); - ctx.registerAsyncAction((c, l) -> learnToRankService.loadLocalModel(modelId, ActionListener.wrap(lm -> { + ctx.registerAsyncAction((c, l) -> learningToRankService.loadLocalModel(modelId, ActionListener.wrap(lm -> { localModelSetOnce.set(lm); l.onResponse(null); }, l::onFailure))); - LearnToRankRescorerBuilder builder = new RewritingLearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new RewritingLearningToRankRescorerBuilder( (rewritingBuilder) -> localModelSetOnce.get() != null - ? new LearnToRankRescorerBuilder(localModelSetOnce.get(), rewrittenConfig, params, learnToRankService) + ? 
new LearningToRankRescorerBuilder(localModelSetOnce.get(), rewrittenConfig, params, learningToRankService) : rewritingBuilder ); @@ -218,15 +223,20 @@ private RescorerBuilder doDataNodeRewrite(QueryRewri * @return A rewritten rescorer with a model definition or a model definition supplier populated * @throws IOException If fetching, parsing, or overall rewrite failures occur */ - private RescorerBuilder doSearchRewrite(QueryRewriteContext ctx) throws IOException { - if (learnToRankConfig == null) { + private RescorerBuilder doSearchRewrite(QueryRewriteContext ctx) throws IOException { + if (learningToRankConfig == null) { return this; } - LearnToRankConfig rewrittenConfig = Rewriteable.rewrite(learnToRankConfig, ctx); - if (rewrittenConfig == learnToRankConfig) { + LearningToRankConfig rewrittenConfig = Rewriteable.rewrite(learningToRankConfig, ctx); + if (rewrittenConfig == learningToRankConfig) { return this; } - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder(localModel, rewrittenConfig, params, learnToRankService); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( + localModel, + rewrittenConfig, + params, + learningToRankService + ); if (windowSize != null) { builder.windowSize(windowSize); } @@ -234,9 +244,9 @@ private RescorerBuilder doSearchRewrite(QueryRewrite } @Override - protected LearnToRankRescorerContext innerBuildContext(int windowSize, SearchExecutionContext context) { + protected LearningToRankRescorerContext innerBuildContext(int windowSize, SearchExecutionContext context) { rescoreOccurred = true; - return new LearnToRankRescorerContext(windowSize, LearnToRankRescorer.INSTANCE, learnToRankConfig, localModel, context); + return new LearningToRankRescorerContext(windowSize, LearningToRankRescorer.INSTANCE, learningToRankConfig, localModel, context); } @Override @@ -255,7 +265,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { assert localModel == null || rescoreOccurred : 
"Unnecessarily populated local model object"; out.writeString(modelId); out.writeGenericMap(params); - out.writeOptionalNamedWriteable(learnToRankConfig); + out.writeOptionalNamedWriteable(learningToRankConfig); } @Override @@ -273,19 +283,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; - LearnToRankRescorerBuilder that = (LearnToRankRescorerBuilder) o; + LearningToRankRescorerBuilder that = (LearningToRankRescorerBuilder) o; return Objects.equals(modelId, that.modelId) && Objects.equals(params, that.params) - && Objects.equals(learnToRankConfig, that.learnToRankConfig) + && Objects.equals(learningToRankConfig, that.learningToRankConfig) && Objects.equals(localModel, that.localModel) - && Objects.equals(learnToRankService, that.learnToRankService) + && Objects.equals(learningToRankService, that.learningToRankService) && rescoreOccurred == that.rescoreOccurred; } @Override public int hashCode() { - return Objects.hash(super.hashCode(), modelId, params, learnToRankConfig, localModel, learnToRankService, rescoreOccurred); + return Objects.hash(super.hashCode(), modelId, params, learningToRankConfig, localModel, learningToRankService, rescoreOccurred); } static class Builder { @@ -300,23 +310,25 @@ public void setParams(Map params) { this.params = params; } - LearnToRankRescorerBuilder build(LearnToRankService learnToRankService) { - return new LearnToRankRescorerBuilder(modelId, params, learnToRankService); + LearningToRankRescorerBuilder build(LearningToRankService learningToRankService) { + return new LearningToRankRescorerBuilder(modelId, params, learningToRankService); } } - private static class RewritingLearnToRankRescorerBuilder extends LearnToRankRescorerBuilder { + private static class RewritingLearningToRankRescorerBuilder extends LearningToRankRescorerBuilder { - private final Function rewriteFunction; + private final Function 
rewriteFunction; - RewritingLearnToRankRescorerBuilder(Function rewriteFunction) { + RewritingLearningToRankRescorerBuilder( + Function rewriteFunction + ) { super(null, null, null); this.rewriteFunction = rewriteFunction; } @Override - public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { - LearnToRankRescorerBuilder builder = this.rewriteFunction.apply(this); + public RescorerBuilder rewrite(QueryRewriteContext ctx) throws IOException { + LearningToRankRescorerBuilder builder = this.rewriteFunction.apply(this); if (windowSize() != null) { builder.windowSize(windowSize()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java similarity index 81% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java index 844f96208cb35..b1df3a2da7c42 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerContext.java @@ -15,8 +15,8 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; 
import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; @@ -24,34 +24,34 @@ import java.util.ArrayList; import java.util.List; -public class LearnToRankRescorerContext extends RescoreContext { +public class LearningToRankRescorerContext extends RescoreContext { final SearchExecutionContext executionContext; final LocalModel regressionModelDefinition; - final LearnToRankConfig learnToRankConfig; + final LearningToRankConfig learningToRankConfig; /** * @param windowSize how many documents to rescore * @param rescorer The rescorer to apply - * @param learnToRankConfig The inference config containing updated and rewritten parameters + * @param learningToRankConfig The inference config containing updated and rewritten parameters * @param regressionModelDefinition The local model inference definition, may be null during certain search phases. * @param executionContext The local shard search context */ - public LearnToRankRescorerContext( + public LearningToRankRescorerContext( int windowSize, Rescorer rescorer, - LearnToRankConfig learnToRankConfig, + LearningToRankConfig learningToRankConfig, LocalModel regressionModelDefinition, SearchExecutionContext executionContext ) { super(windowSize, rescorer); this.executionContext = executionContext; this.regressionModelDefinition = regressionModelDefinition; - this.learnToRankConfig = learnToRankConfig; + this.learningToRankConfig = learningToRankConfig; } List buildFeatureExtractors(IndexSearcher searcher) throws IOException { - assert this.regressionModelDefinition != null && this.learnToRankConfig != null; + assert this.regressionModelDefinition != null && this.learningToRankConfig != null; List featureExtractors = new ArrayList<>(); if (this.regressionModelDefinition.inputFields().isEmpty() == false) { featureExtractors.add( @@ -60,7 +60,7 @@ List buildFeatureExtractors(IndexSearcher searcher) throws IOE } List weights = new ArrayList<>(); List queryFeatureNames = new ArrayList<>(); - for 
(LearnToRankFeatureExtractorBuilder featureExtractorBuilder : learnToRankConfig.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : learningToRankConfig.getFeatureExtractorBuilders()) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { Query query = executionContext.toQuery(queryExtractorBuilder.query().getParsedQuery()).query(); Weight weight = searcher.rewrite(query).createWeight(searcher, ScoreMode.COMPLETE, 1f); @@ -77,11 +77,11 @@ List buildFeatureExtractors(IndexSearcher searcher) throws IOE @Override public List getParsedQueries() { - if (this.learnToRankConfig == null) { + if (this.learningToRankConfig == null) { return List.of(); } List parsedQueries = new ArrayList<>(); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : learnToRankConfig.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : learningToRankConfig.getFeatureExtractorBuilders()) { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { parsedQueries.add(executionContext.toQuery(queryExtractorBuilder.query().getParsedQuery())); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java similarity index 57% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java index 18b2c6fe5ff3f..42598691beec2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerFeature.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java @@ -10,19 +10,19 @@ import 
org.elasticsearch.common.util.FeatureFlag; /** - * Learn to rank feature flag. When the feature is complete, this flag will be removed. + * Learning to rank feature flag. When the feature is complete, this flag will be removed. * * Upon removal, ensure transport serialization is all corrected for future BWC. * - * See {@link LearnToRankRescorerBuilder} + * See {@link LearningToRankRescorerBuilder} */ -public class LearnToRankRescorerFeature { +public class LearningToRankRescorerFeature { - private LearnToRankRescorerFeature() {} + private LearningToRankRescorerFeature() {} - private static final FeatureFlag LEARN_TO_RANK = new FeatureFlag("learn_to_rank"); + private static final FeatureFlag LEARNING_TO_RANK = new FeatureFlag("learning_to_rank"); public static boolean isEnabled() { - return LEARN_TO_RANK.isEnabled(); + return LEARNING_TO_RANK.isEnabled(); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java similarity index 86% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java index 2f85000705d8a..177099801e0a5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java @@ -26,8 +26,8 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -51,7 +51,7 @@ import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.xpack.core.ml.job.messages.Messages.INFERENCE_CONFIG_QUERY_BAD_FORMAT; -public class LearnToRankService { +public class LearningToRankService { private static final Map SCRIPT_OPTIONS = Map.ofEntries( entry(MustacheScriptEngine.DETECT_MISSING_PARAMS_OPTION, Boolean.TRUE.toString()) ); @@ -60,7 +60,7 @@ public class LearnToRankService { private final ScriptService scriptService; private final XContentParserConfiguration parserConfiguration; - public LearnToRankService( + public LearningToRankService( ModelLoadingService modelLoadingService, TrainedModelProvider trainedModelProvider, ScriptService scriptService, @@ -69,7 +69,7 @@ public LearnToRankService( this(modelLoadingService, trainedModelProvider, scriptService, XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry)); } - LearnToRankService( + LearningToRankService( ModelLoadingService modelLoadingService, TrainedModelProvider trainedModelProvider, ScriptService scriptService, @@ -82,30 +82,30 @@ public LearnToRankService( } /** - * Asynchronously load a regression model to be used for learn to rank. + * Asynchronously load a regression model to be used for learning to rank. * * @param modelId The model id to be loaded. * @param listener Response listener. 
*/ public void loadLocalModel(String modelId, ActionListener listener) { - modelLoadingService.getModelForLearnToRank(modelId, listener); + modelLoadingService.getModelForLearningToRank(modelId, listener); } /** - * Asynchronously load the learn to rank config by model id. + * Asynchronously load the learning to rank config by model id. * Once the model is loaded, templates are executed using params provided. * * @param modelId Id of the model. * @param params Templates params. * @param listener Response listener. */ - public void loadLearnToRankConfig(String modelId, Map params, ActionListener listener) { + public void loadLearningToRankConfig(String modelId, Map params, ActionListener listener) { trainedModelProvider.getTrainedModel( modelId, GetTrainedModelsAction.Includes.all(), null, ActionListener.wrap(trainedModelConfig -> { - if (trainedModelConfig.getInferenceConfig() instanceof LearnToRankConfig retrievedInferenceConfig) { + if (trainedModelConfig.getInferenceConfig() instanceof LearningToRankConfig retrievedInferenceConfig) { listener.onResponse(applyParams(retrievedInferenceConfig, params)); return; } @@ -114,7 +114,7 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac Messages.getMessage( Messages.INFERENCE_CONFIG_INCORRECT_TYPE, Optional.ofNullable(trainedModelConfig.getInferenceConfig()).map(InferenceConfig::getName).orElse("null"), - LearnToRankConfig.NAME.getPreferredName() + LearningToRankConfig.NAME.getPreferredName() ) ) ); @@ -123,29 +123,29 @@ public void loadLearnToRankConfig(String modelId, Map params, Ac } /** - * Applies templates params to a {@link LearnToRankConfig} object. + * Applies templates params to a {@link LearningToRankConfig} object. * * @param config Original config. * @param params Templates params. - * @return A {@link LearnToRankConfig} object with templates applied. + * @return A {@link LearningToRankConfig} object with templates applied. 
* * @throws IOException */ - private LearnToRankConfig applyParams(LearnToRankConfig config, Map params) throws Exception { + private LearningToRankConfig applyParams(LearningToRankConfig config, Map params) throws Exception { if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG) == false) { return config; } - List featureExtractorBuilders = new ArrayList<>(); + List featureExtractorBuilders = new ArrayList<>(); Map mergedParams = new HashMap<>(Objects.requireNonNullElse(params, Map.of())); mergeDefaults(mergedParams, config.getParamsDefaults()); - for (LearnToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { + for (LearningToRankFeatureExtractorBuilder featureExtractorBuilder : config.getFeatureExtractorBuilders()) { featureExtractorBuilders.add(applyParams(featureExtractorBuilder, mergedParams)); } - return LearnToRankConfig.builder(config).setLearnToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); + return LearningToRankConfig.builder(config).setLearningToRankFeatureExtractorBuilders(featureExtractorBuilders).build(); } /** @@ -157,8 +157,8 @@ private LearnToRankConfig applyParams(LearnToRankConfig config, Map params ) throws Exception { if (featureExtractorBuilder instanceof QueryExtractorBuilder queryExtractorBuilder) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java similarity index 66% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java index 5939d012831aa..3bfe8aa390d8b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderRewriteTests.java 
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderRewriteTests.java @@ -26,17 +26,17 @@ import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.test.AbstractBuilderTestCase; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearnToRankFeatureExtractorBuilder; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.LearningToRankFeatureExtractorBuilder; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import java.io.IOException; import java.util.List; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfigTests.randomLearnToRankConfig; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.BAD_MODEL; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.GOOD_MODEL; -import static org.elasticsearch.xpack.ml.inference.ltr.LearnToRankServiceTests.GOOD_MODEL_CONFIG; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.BAD_MODEL; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.GOOD_MODEL; +import static org.elasticsearch.xpack.ml.inference.ltr.LearningToRankServiceTests.GOOD_MODEL_CONFIG; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -50,19 +50,19 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class LearnToRankRescorerBuilderRewriteTests extends AbstractBuilderTestCase { +public class 
LearningToRankRescorerBuilderRewriteTests extends AbstractBuilderTestCase { public void testMustRewrite() { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( GOOD_MODEL, - randomLearnToRankConfig(), + randomLearningToRankConfig(), null, - learnToRankService + learningToRankService ); SearchExecutionContext context = createSearchExecutionContext(); - LearnToRankRescorerContext rescorerContext = rescorerBuilder.innerBuildContext(randomIntBetween(1, 30), context); + LearningToRankRescorerContext rescorerContext = rescorerBuilder.innerBuildContext(randomIntBetween(1, 30), context); IllegalStateException e = expectThrows( IllegalStateException.class, () -> rescorerContext.rescorer() @@ -76,25 +76,25 @@ public void testMustRewrite() { } public void testRewriteOnCoordinator() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder(GOOD_MODEL, null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder(GOOD_MODEL, null, learningToRankService); rescorerBuilder.windowSize(4); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), randomIntBetween(1_500_000, Integer.MAX_VALUE) ); - LearnToRankRescorerBuilder rewritten = rewriteAndFetch(rescorerBuilder, context); - assertThat(rewritten.learnToRankConfig(), not(nullValue())); - assertThat(rewritten.learnToRankConfig().getNumTopFeatureImportanceValues(), equalTo(2)); + LearningToRankRescorerBuilder rewritten = rewriteAndFetch(rescorerBuilder, 
context); + assertThat(rewritten.learningToRankConfig(), not(nullValue())); + assertThat(rewritten.learningToRankConfig().getNumTopFeatureImportanceValues(), equalTo(2)); assertThat( "feature_1", is( in( - rewritten.learnToRankConfig() + rewritten.learningToRankConfig() .getFeatureExtractorBuilders() .stream() - .map(LearnToRankFeatureExtractorBuilder::featureName) + .map(LearningToRankFeatureExtractorBuilder::featureName) .toList() ) ) @@ -103,8 +103,8 @@ public void testRewriteOnCoordinator() throws IOException { } public void testRewriteOnCoordinatorWithBadModel() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder(BAD_MODEL, null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder(BAD_MODEL, null, learningToRankService); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), @@ -115,8 +115,8 @@ public void testRewriteOnCoordinatorWithBadModel() throws IOException { } public void testRewriteOnCoordinatorWithMissingModel() { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder("missing_model", null, learnToRankService); + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder("missing_model", null, learningToRankService); CoordinatorRewriteContext context = createCoordinatorRewriteContext( new DateFieldMapper.DateFieldType("@timestamp"), randomIntBetween(0, 1_100_000), @@ -129,27 +129,27 @@ public void testRewriteOnShard() throws IOException { LocalModel localModel = mock(LocalModel.class); 
when(localModel.getModelId()).thenReturn(GOOD_MODEL); - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( localModel, - (LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), + (LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), null, - learnToRankService + learningToRankService ); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); - LearnToRankRescorerBuilder rewritten = (LearnToRankRescorerBuilder) rescorerBuilder.rewrite(createSearchExecutionContext()); + LearningToRankRescorerBuilder rewritten = (LearningToRankRescorerBuilder) rescorerBuilder.rewrite(createSearchExecutionContext()); assertFalse(searchExecutionContext.hasAsyncActions()); assertSame(localModel, rewritten.localModel()); assertEquals(localModel.getModelId(), rewritten.modelId()); } public void testRewriteAndFetchOnDataNode() throws IOException { - LearnToRankService learnToRankService = learnToRankServiceMock(); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankService learningToRankService = learningToRankServiceMock(); + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( GOOD_MODEL, - randomLearnToRankConfig(), + randomLearningToRankConfig(), null, - learnToRankService + learningToRankService ); boolean setWindowSize = randomBoolean(); @@ -157,7 +157,7 @@ public void testRewriteAndFetchOnDataNode() throws IOException { rescorerBuilder.windowSize(42); } DataRewriteContext rewriteContext = dataRewriteContext(); - LearnToRankRescorerBuilder rewritten = (LearnToRankRescorerBuilder) rescorerBuilder.rewrite(rewriteContext); + LearningToRankRescorerBuilder rewritten = (LearningToRankRescorerBuilder) 
rescorerBuilder.rewrite(rewriteContext); assertNotSame(rescorerBuilder, rewritten); assertTrue(rewriteContext.hasAsyncActions()); if (setWindowSize) { @@ -166,8 +166,8 @@ public void testRewriteAndFetchOnDataNode() throws IOException { } @SuppressWarnings("unchecked") - private static LearnToRankService learnToRankServiceMock() { - LearnToRankService learnToRankService = mock(LearnToRankService.class); + private static LearningToRankService learningToRankServiceMock() { + LearningToRankService learningToRankService = mock(LearningToRankService.class); doAnswer(invocation -> { String modelId = invocation.getArgument(0); @@ -180,15 +180,15 @@ private static LearnToRankService learnToRankServiceMock() { l.onFailure(new ResourceNotFoundException("missing model")); } return null; - }).when(learnToRankService).loadLearnToRankConfig(anyString(), any(), any()); + }).when(learningToRankService).loadLearningToRankConfig(anyString(), any(), any()); doAnswer(invocation -> { ActionListener l = invocation.getArgument(1, ActionListener.class); l.onResponse(mock(LocalModel.class)); return null; - }).when(learnToRankService).loadLocalModel(anyString(), any()); + }).when(learningToRankService).loadLocalModel(anyString(), any()); - return learnToRankService; + return learningToRankService; } public void testBuildContext() throws Exception { @@ -200,14 +200,14 @@ public void testBuildContext() throws Exception { doAnswer(invocation -> invocation.getArgument(0)).when(searcher).rewrite(any(Query.class)); SearchExecutionContext context = createSearchExecutionContext(searcher); - LearnToRankRescorerBuilder rescorerBuilder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder rescorerBuilder = new LearningToRankRescorerBuilder( localModel, - (LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), + (LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig(), null, - mock(LearnToRankService.class) + mock(LearningToRankService.class) ); - LearnToRankRescorerContext 
rescoreContext = rescorerBuilder.innerBuildContext(20, context); + LearningToRankRescorerContext rescoreContext = rescorerBuilder.innerBuildContext(20, context); assertNotNull(rescoreContext); assertThat(rescoreContext.getWindowSize(), equalTo(20)); List featureExtractors = rescoreContext.buildFeatureExtractors(context.searcher()); @@ -218,9 +218,12 @@ public void testBuildContext() throws Exception { ); } - private LearnToRankRescorerBuilder rewriteAndFetch(RescorerBuilder builder, QueryRewriteContext context) { - PlainActionFuture> future = new PlainActionFuture<>(); + private LearningToRankRescorerBuilder rewriteAndFetch( + RescorerBuilder builder, + QueryRewriteContext context + ) { + PlainActionFuture> future = new PlainActionFuture<>(); Rewriteable.rewriteAndFetch(builder, context, future); - return (LearnToRankRescorerBuilder) future.actionGet(); + return (LearningToRankRescorerBuilder) future.actionGet(); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java similarity index 71% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java index 4f6e5a0512f73..79044a465442b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankRescorerBuilderSerializationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import 
org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; @@ -30,18 +30,18 @@ import java.util.Map; import static org.elasticsearch.search.rank.RankBuilder.WINDOW_SIZE_FIELD; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfigTests.randomLearnToRankConfig; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class LearnToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { +public class LearningToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { - private static LearnToRankService learnToRankService = mock(LearnToRankService.class); + private static LearningToRankService learningToRankService = mock(LearningToRankService.class); @Override - protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { + protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { String fieldName = null; - LearnToRankRescorerBuilder rescorer = null; + LearningToRankRescorerBuilder rescorer = null; Integer windowSize = null; XContentParser.Token token = parser.nextToken(); assert token == XContentParser.Token.START_OBJECT; @@ -55,7 +55,7 @@ protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) thro throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - rescorer = LearnToRankRescorerBuilder.fromXContent(parser, 
learnToRankService); + rescorer = LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService); } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } @@ -70,19 +70,19 @@ protected LearnToRankRescorerBuilder doParseInstance(XContentParser parser) thro } @Override - protected Writeable.Reader instanceReader() { - return in -> new LearnToRankRescorerBuilder(in, learnToRankService); + protected Writeable.Reader instanceReader() { + return in -> new LearningToRankRescorerBuilder(in, learningToRankService); } @Override - protected LearnToRankRescorerBuilder createTestInstance() { - LearnToRankRescorerBuilder builder = randomBoolean() + protected LearningToRankRescorerBuilder createTestInstance() { + LearningToRankRescorerBuilder builder = randomBoolean() ? createXContextTestInstance(null) - : new LearnToRankRescorerBuilder( + : new LearningToRankRescorerBuilder( randomAlphaOfLength(10), - randomLearnToRankConfig(), + randomLearningToRankConfig(), randomBoolean() ? randomParams() : null, - learnToRankService + learningToRankService ); if (randomBoolean()) { @@ -93,34 +93,34 @@ protected LearnToRankRescorerBuilder createTestInstance() { } @Override - protected LearnToRankRescorerBuilder createXContextTestInstance(XContentType xContentType) { - return new LearnToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learnToRankService); + protected LearningToRankRescorerBuilder createXContextTestInstance(XContentType xContentType) { + return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? 
randomParams() : null, learningToRankService); } @Override - protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder instance) throws IOException { + protected LearningToRankRescorerBuilder mutateInstance(LearningToRankRescorerBuilder instance) throws IOException { int i = randomInt(4); return switch (i) { case 0 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), instance.params(), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); } yield builder; } - case 1 -> new LearnToRankRescorerBuilder(instance.modelId(), instance.params(), learnToRankService).windowSize( + case 1 -> new LearningToRankRescorerBuilder(instance.modelId(), instance.params(), learningToRankService).windowSize( randomValueOtherThan(instance.windowSize(), () -> randomIntBetween(1, 10000)) ); case 2 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( instance.modelId(), randomValueOtherThan(instance.params(), () -> (randomBoolean() ? 
randomParams() : null)), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize() + 1); @@ -128,12 +128,15 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i yield builder; } case 3 -> { - LearnToRankConfig learnToRankConfig = randomValueOtherThan(instance.learnToRankConfig(), () -> randomLearnToRankConfig()); - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankConfig learningToRankConfig = randomValueOtherThan( + instance.learningToRankConfig(), + () -> randomLearningToRankConfig() + ); + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( instance.modelId(), - learnToRankConfig, + learningToRankConfig, null, - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); @@ -141,11 +144,11 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i yield builder; } case 4 -> { - LearnToRankRescorerBuilder builder = new LearnToRankRescorerBuilder( + LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( mock(LocalModel.class), - instance.learnToRankConfig(), + instance.learningToRankConfig(), instance.params(), - learnToRankService + learningToRankService ); if (instance.windowSize() != null) { builder.windowSize(instance.windowSize()); @@ -157,7 +160,7 @@ protected LearnToRankRescorerBuilder mutateInstance(LearnToRankRescorerBuilder i } @Override - protected LearnToRankRescorerBuilder mutateInstanceForVersion(LearnToRankRescorerBuilder instance, TransportVersion version) { + protected LearningToRankRescorerBuilder mutateInstanceForVersion(LearningToRankRescorerBuilder instance, TransportVersion version) { return instance; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java similarity index 77% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java index cbe91ba874e6d..39d0af9041d03 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearnToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearnToRankConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ltr.QueryExtractorBuilder; import org.elasticsearch.xpack.core.ml.ltr.MlLTRNamedXContentProvider; @@ -49,7 +49,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -public class LearnToRankServiceTests extends ESTestCase { +public class LearningToRankServiceTests extends ESTestCase { public static final String GOOD_MODEL = "modelId"; public static final String BAD_MODEL = "badModel"; public static final TrainedModelConfig GOOD_MODEL_CONFIG = TrainedModelConfig.builder() @@ -59,7 +59,7 @@ public class LearnToRankServiceTests extends ESTestCase { .setModelSize(2) .setModelType(TrainedModelType.TREE_ENSEMBLE) .setInferenceConfig( - new LearnToRankConfig( + new LearningToRankConfig( 2, List.of( new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), @@ -79,44 +79,44 @@ 
public class LearnToRankServiceTests extends ESTestCase { .build(); @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(GOOD_MODEL, Map.of(), listener); + public void testLoadLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig(GOOD_MODEL, Map.of(), listener); - verify(listener).onResponse(eq((LearnToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); + verify(listener).onResponse(eq((LearningToRankConfig) GOOD_MODEL_CONFIG.getInferenceConfig())); } @SuppressWarnings("unchecked") - public void testLoadMissingLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("non-existing-model", Map.of(), listener); + public void testLoadMissingLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig("non-existing-model", Map.of(), listener); verify(listener).onFailure(isA(ResourceNotFoundException.class)); } @SuppressWarnings("unchecked") - public void testLoadBadLearnToRankConfig() throws Exception { - LearnToRankService learnToRankService = getTestLearnToRankService(); - ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig(BAD_MODEL, Map.of(), listener); + public void testLoadBadLearningToRankConfig() throws Exception { + LearningToRankService learningToRankService = getTestLearningToRankService(); + ActionListener listener = 
mock(ActionListener.class); + learningToRankService.loadLearningToRankConfig(BAD_MODEL, Map.of(), listener); verify(listener).onFailure(isA(ElasticsearchStatusException.class)); } @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfigWithTemplate() throws Exception { - LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + public void testLoadLearningToRankConfigWithTemplate() throws Exception { + LearningToRankConfig learningToRankConfig = new LearningToRankConfig( 0, List.of(new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "{{foo_param}}"))), Map.of() ); - LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); - ActionListener listener = mock(ActionListener.class); + LearningToRankService learningToRankService = getTestLearningToRankService(learningToRankConfig); + ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); + learningToRankService.loadLearningToRankConfig("model-id", Map.ofEntries(Map.entry("foo_param", "foo")), listener); verify(listener).onResponse(argThat(retrievedConfig -> { assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(1)); QueryExtractorBuilder queryExtractorBuilder = retrievedConfig.getQueryFeatureExtractorBuilders().get(0); @@ -127,8 +127,8 @@ public void testLoadLearnToRankConfigWithTemplate() throws Exception { } @SuppressWarnings("unchecked") - public void testLoadLearnToRankConfigWithMissingTemplateParams() throws Exception { - LearnToRankConfig learnToRankConfig = new LearnToRankConfig( + public void testLoadLearningToRankConfigWithMissingTemplateParams() throws Exception { + LearningToRankConfig learningToRankConfig = new LearningToRankConfig( 0, List.of( new QueryExtractorBuilder("feature_1", QueryProviderTests.createTestQueryProvider("field_1", "foo")), @@ -139,10 +139,10 @@ public void 
testLoadLearnToRankConfigWithMissingTemplateParams() throws Exceptio Map.of("baz_param", "default_value") ); - LearnToRankService learnToRankService = getTestLearnToRankService(learnToRankConfig); - ActionListener listener = mock(ActionListener.class); + LearningToRankService learningToRankService = getTestLearningToRankService(learningToRankConfig); + ActionListener listener = mock(ActionListener.class); - learnToRankService.loadLearnToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); + learningToRankService.loadLearningToRankConfig("model-id", randomBoolean() ? null : Map.of(), listener); verify(listener).onResponse(argThat(retrievedConfig -> { // Check all features are present. assertThat(retrievedConfig.getFeatureExtractorBuilders(), hasSize(4)); @@ -208,12 +208,12 @@ private TrainedModelProvider mockTrainedModelProvider() { return trainedModelProvider; } - private LearnToRankService getTestLearnToRankService() { - return getTestLearnToRankService(mockTrainedModelProvider()); + private LearningToRankService getTestLearningToRankService() { + return getTestLearningToRankService(mockTrainedModelProvider()); } @SuppressWarnings("unchecked") - private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRankConfig) { + private LearningToRankService getTestLearningToRankService(LearningToRankConfig learningToRankConfig) { TrainedModelProvider trainedModelProvider = mock(TrainedModelProvider.class); doAnswer(invocation -> { @@ -227,18 +227,18 @@ private LearnToRankService getTestLearnToRankService(LearnToRankConfig learnToRa .setEstimatedOperations(1) .setModelSize(2) .setModelType(TrainedModelType.TREE_ENSEMBLE) - .setInferenceConfig(learnToRankConfig) + .setInferenceConfig(learningToRankConfig) .build() ); return null; }).when(trainedModelProvider).getTrainedModel(any(), any(), any(), any()); - return getTestLearnToRankService(trainedModelProvider); + return getTestLearningToRankService(trainedModelProvider); } - private 
LearnToRankService getTestLearnToRankService(TrainedModelProvider trainedModelProvider) { - return new LearnToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); + private LearningToRankService getTestLearningToRankService(TrainedModelProvider trainedModelProvider) { + return new LearningToRankService(mockModelLoadingService(), trainedModelProvider, getTestScriptService(), xContentRegistry()); } private ScriptService getTestScriptService() { diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 3fd8e952d626e..0efe2797c7f76 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,7 +43,7 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) - .feature(FeatureFlag.LEARN_TO_RANK) + .feature(FeatureFlag.LEARNING_TO_RANK) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml similarity index 94% rename from x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml rename to x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml index 
a0ae4b7c44316..e307e72d2ca4f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learn_to_rank_rescorer.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/learning_to_rank_rescorer.yml @@ -11,7 +11,7 @@ setup: "description": "super complex model for tests", "input": {"field_names": ["cost", "product"]}, "inference_config": { - "learn_to_rank": { + "learning_to_rank": { } }, "definition": { @@ -146,7 +146,7 @@ setup: { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 17.0 } @@ -162,7 +162,7 @@ setup: "query": {"term": {"product": "Laptop"}}, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 6.0 } @@ -182,7 +182,7 @@ setup: { "rescore": { "window_size": 2, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - match: { hits.hits.0._score: 17.0 } @@ -209,7 +209,7 @@ setup: }, { "window_size": 3, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } }, { "window_size": 2, @@ -232,7 +232,7 @@ setup: { "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-missing" } + "learning_to_rank": { "model_id": "ltr-missing" } } } --- @@ -245,7 +245,7 @@ setup: "query": {"term": {"product": "Speaker"}}, "rescore": { "window_size": 10, - "learn_to_rank": { "model_id": "ltr-model" } + "learning_to_rank": { "model_id": "ltr-model" } } } - length: { hits.hits: 0 } From ccf92e42f1ea6b941c35a66daf3ed260e37808b4 Mon Sep 17 00:00:00 2001 From: Matteo Piergiovanni <134913285+piergm@users.noreply.github.com> Date: Tue, 5 Dec 2023 08:43:34 +0100 Subject: [PATCH 199/263] Node stats as metrics (#102248) In ES there are node stats that can be retrieved via API call (`GET /_nodes/stats`) but not scraped by Metricbeat. 
This PR register as metrics some of those stats. The API has the capability to aggregate stats of all the nodes connected to the cluster. We decided instead each node will report its own stats in order not to hit the wire and cause unwanted latencies. All the metrics are registered as either `LongAsyncCounter` or `LongGauge` both of which have a callback reporting the total value for a metric and not the delta. We have in place a lazy cache that expires after 1 minute for `NodeStats` in order not to recalculate it for every metric callback. List of metrics that this PR will introduce: - es.node.stats.indices.get.total - es.node.stats.indices.get.time - es.node.stats.indices.search.fetch.total - es.node.stats.indices.search.fetch.time - es.node.stats.indices.merge.total - es.node.stats.indices.merge.time - es.node.stats.indices.translog.operations - es.node.stats.indices.translog.size - es.node.stats.indices.translog.uncommitted_operations - es.node.stats.indices.translog.uncommitted_size - es.node.stats.indices.translog.earliest_last_modified_age - es.node.stats.transport.rx_size - es.node.stats.transport.tx_size - es.node.stats.jvm.mem.pools.young.used - es.node.stats.jvm.mem.pools.survivor.used - es.node.stats.jvm.mem.pools.old.used - es.node.stats.fs.io_stats.io_time.total --- docs/changelog/102248.yaml | 5 + .../monitor/metrics/NodeMetrics.java | 311 ++++++++++++++++++ .../java/org/elasticsearch/node/Node.java | 5 + .../elasticsearch/node/NodeConstruction.java | 4 + 4 files changed, 325 insertions(+) create mode 100644 docs/changelog/102248.yaml create mode 100644 server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java diff --git a/docs/changelog/102248.yaml b/docs/changelog/102248.yaml new file mode 100644 index 0000000000000..854e8afde4086 --- /dev/null +++ b/docs/changelog/102248.yaml @@ -0,0 +1,5 @@ +pr: 102248 +summary: Node stats as metrics +area: Search +type: enhancement +issues: [] diff --git 
a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java new file mode 100644 index 0000000000000..e36f1a085fbde --- /dev/null +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -0,0 +1,311 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.monitor.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.util.SingleObjectCache; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.monitor.jvm.GcNames; +import org.elasticsearch.monitor.jvm.JvmStats; +import org.elasticsearch.node.NodeService; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * NodeMetrics monitors various statistics of an Elasticsearch node and exposes them as metrics through + * the provided MeterRegistry. It includes counters for indices operations, memory usage, transport statistics, + * and more. The metrics are periodically updated based on a schedule. 
+ */ +public class NodeMetrics extends AbstractLifecycleComponent { + private final Logger logger = LogManager.getLogger(NodeMetrics.class); + private final MeterRegistry registry; + private final NodeService nodeService; + private final List metrics; + private NodeStatsCache stats; + + /** + * Constructs a new NodeMetrics instance. + * + * @param meterRegistry The MeterRegistry used to register metrics. + * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. + */ + public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { + this.registry = meterRegistry; + this.nodeService = nodeService; + this.metrics = new ArrayList<>(17); + } + + /** + * Registers async metrics in the provided MeterRegistry. We are using the lazy NodeStatCache to retrieve + * the NodeStats once per pool period instead of for every callback if we were not to use it. + * + * @param registry The MeterRegistry used to register and collect metrics. + */ + private void registerAsyncMetrics(MeterRegistry registry) { + // Agent should poll stats every 4 minutes and being this cache is lazy we need a + // number high enough so that the cache does not update during the same poll + // period and that expires before a new poll period, therefore we choose 1 minute. 
+ this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.get.total", + "Total number of get operations", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.get.time", + "Time in milliseconds spent performing get operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.search.fetch.total", + "Total number of fetch operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.search.fetch.time", + "Time in milliseconds spent performing fetch operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.merge.total", + "Total number of merge operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotal()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.merge.time", + "Time in milliseconds spent performing merge operations.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotalTimeInMillis()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.operations", + "Number of transaction log operations.", + "operation", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) + ) + ); + + metrics.add( + registry.registerLongGauge( + 
"es.node.stats.indices.translog.size", + "Size, in bytes, of the transaction log.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getTranslogSizeInBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.uncommitted_operations", + "Number of uncommitted transaction log operations.", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.indices.translog.uncommitted_size", + "Size, in bytes, of uncommitted transaction log operations.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedSizeInBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.indices.translog.earliest_last_modified_age", + "Earliest last modified age for the transaction log.", + "time", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getEarliestLastModifiedAge()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.transport.rx_size", + "Size, in bytes, of RX packets received by the node during internal cluster communication.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getRxSize().getBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.transport.tx_size", + "Size, in bytes, of TX packets sent by the node during internal cluster communication.", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getTxSize().getBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.young.used", + "Memory, in bytes, used by the young generation heap.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.YOUNG)) + ) + ); + + metrics.add( + 
registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.survivor.used", + "Memory, in bytes, used by the survivor space.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.SURVIVOR)) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.node.stats.jvm.mem.pools.old.used", + "Memory, in bytes, used by the old generation heap.", + "bytes", + () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.OLD)) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.node.stats.fs.io_stats.io_time.total", + "The total time in millis spent performing I/O operations across all devices used by Elasticsearch.", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getFs().getIoStats().getTotalIOTimeMillis()) + ) + ); + } + + /** + * Retrieves the bytes used by a specific garbage collection generation from the provided JvmStats.Mem. + * + * @param mem The JvmStats.Mem containing memory pool information. + * @param name The name of the garbage collection generation (e.g., "young", "survivor", "old"). + * @return The number of bytes used by the specified garbage collection generation. + */ + private long bytesUsedByGCGen(JvmStats.Mem mem, String name) { + long bytesUsed = 0; + for (JvmStats.MemoryPool pool : mem) { + if (pool.getName().equals(name)) { + bytesUsed = pool.getUsed().getBytes(); + } + } + return bytesUsed; + } + + /** + * Retrieves the current NodeStats for the Elasticsearch node. + * + * @return The current NodeStats. 
+ */ + private NodeStats getNodeStats() { + CommonStatsFlags flags = new CommonStatsFlags( + CommonStatsFlags.Flag.Get, + CommonStatsFlags.Flag.Search, + CommonStatsFlags.Flag.Merge, + CommonStatsFlags.Flag.Translog + ); + return nodeService.stats( + flags, + true, + false, + false, + true, + false, + true, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false + ); + } + + @Override + protected void doStart() { + registerAsyncMetrics(registry); + } + + @Override + protected void doStop() { + stats.stopRefreshing(); + } + + @Override + protected void doClose() throws IOException { + metrics.forEach(metric -> { + try { + metric.close(); + } catch (Exception e) { + logger.warn("metrics close() method should not throw Exception", e); + } + }); + } + + /** + * A very simple NodeStats cache that allows non-blocking refresh calls + * lazily triggered by expiry time. When getOrRefresh() is called either + * the cached NodeStats is returned if refreshInterval didn't expire or + * refresh() is called, cache is updated and the new instance returned. + */ + private class NodeStatsCache extends SingleObjectCache { + private boolean refresh; + + NodeStatsCache(TimeValue interval) { + super(interval, getNodeStats()); + this.refresh = true; + } + + @Override + protected NodeStats refresh() { + return refresh ? 
getNodeStats() : getNoRefresh(); + } + + public void stopRefreshing() { + this.refresh = false; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 1c1b9745befe8..67c604248a7f4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -60,6 +60,7 @@ import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.monitor.fs.FsHealthService; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.metrics.NodeMetrics; import org.elasticsearch.node.internal.TerminationHandler; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.ClusterPlugin; @@ -419,6 +420,8 @@ public void onTimeout(TimeValue timeout) { } } + injector.getInstance(NodeMetrics.class).start(); + logger.info("started {}", transportService.getLocalNode()); pluginsService.filterPlugins(ClusterPlugin.class).forEach(ClusterPlugin::onNodeStarted); @@ -462,6 +465,7 @@ private void stop() { stopIfStarted(GatewayService.class); stopIfStarted(SearchService.class); stopIfStarted(TransportService.class); + stopIfStarted(NodeMetrics.class); pluginLifecycleComponents.forEach(Node::stopIfStarted); // we should stop this last since it waits for resources to get released @@ -530,6 +534,7 @@ public synchronized void close() throws IOException { toClose.add(injector.getInstance(SearchService.class)); toClose.add(() -> stopWatch.stop().start("transport")); toClose.add(injector.getInstance(TransportService.class)); + toClose.add(injector.getInstance(NodeMetrics.class)); if (ReadinessService.enabled(environment)) { toClose.add(injector.getInstance(ReadinessService.class)); } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 19a1310ed86aa..7a0d8c941e50f 100644 --- 
a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -125,6 +125,7 @@ import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.fs.FsHealthService; import org.elasticsearch.monitor.jvm.JvmInfo; +import org.elasticsearch.monitor.metrics.NodeMetrics; import org.elasticsearch.node.internal.TerminationHandler; import org.elasticsearch.node.internal.TerminationHandlerProvider; import org.elasticsearch.persistent.PersistentTasksClusterService; @@ -963,6 +964,8 @@ record PluginServiceInstances( repositoryService ); + final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService); + final SearchService searchService = serviceProvider.newSearchService( pluginsService, clusterService, @@ -1039,6 +1042,7 @@ record PluginServiceInstances( b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(searchService::aggReduceContextBuilder)); b.bind(Transport.class).toInstance(transport); b.bind(TransportService.class).toInstance(transportService); + b.bind(NodeMetrics.class).toInstance(nodeMetrics); b.bind(NetworkService.class).toInstance(networkService); b.bind(IndexMetadataVerifier.class).toInstance(indexMetadataVerifier); b.bind(ClusterInfoService.class).toInstance(clusterInfoService); From 43c320f75fd4803fdf9c0902edc2ce609491723e Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 5 Dec 2023 08:53:52 +0100 Subject: [PATCH 200/263] Fix layout for MV_EXPAND (#102916) --- docs/changelog/102916.yaml | 6 ++++++ .../src/main/resources/mv_expand.csv-spec | 10 ++++++++++ .../elasticsearch/xpack/esql/planner/Layout.java | 9 +++++++++ .../xpack/esql/planner/LocalExecutionPlanner.java | 15 ++------------- 4 files changed, 27 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/102916.yaml diff --git a/docs/changelog/102916.yaml b/docs/changelog/102916.yaml new file mode 100644 index 
0000000000000..3943f34d91221 --- /dev/null +++ b/docs/changelog/102916.yaml @@ -0,0 +1,6 @@ +pr: 102916 +summary: Fix layout for MV_EXPAND +area: ES|QL +type: bug +issues: + - 102912 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index c681a1a7e977c..a3bc9c6c6dcf6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -306,3 +306,13 @@ a:long | b:long | c:long | gender:keyword | str:keyword | x:key 57 |57 |57 |M |"57,M" |M 0 |10 |10 |null |null |null ; + + +//see https://github.com/elastic/elasticsearch/issues/102912 +statsDissectThatOverwritesAndMvExpand#[skip:-8.11.99] +row a = "a", b = 1 | stats e = min(b) by a | dissect a "%{e}" | mv_expand e; + +a:keyword | e:keyword +a | a +; + diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java index 871d3751b225d..97885a060d639 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Layout.java @@ -119,5 +119,14 @@ public Layout build() { } return new DefaultLayout(Collections.unmodifiableMap(layout), numberOfChannels); } + + public void replace(NameId id, NameId id1) { + for (ChannelSet channel : this.channels) { + if (channel != null && channel.nameIds.contains(id)) { + channel.nameIds.remove(id); + channel.nameIds.add(id1); + } + } + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 3d377497e17af..c531fd01c2a40 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -581,20 +581,9 @@ private PhysicalOperation planLimit(LimitExec limit, LocalExecutionPlannerContex private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecutionPlannerContext context) { PhysicalOperation source = plan(mvExpandExec.child(), context); - List childOutput = mvExpandExec.child().output(); int blockSize = 5000;// TODO estimate row size and use context.pageSize() - - Layout.Builder layout = new Layout.Builder(); - List inverse = source.layout.inverse(); - var expandedName = mvExpandExec.expanded().name(); - for (int index = 0; index < inverse.size(); index++) { - if (childOutput.get(index).name().equals(expandedName)) { - layout.append(mvExpandExec.expanded()); - } else { - layout.append(inverse.get(index)); - } - } - + Layout.Builder layout = source.layout.builder(); + layout.replace(mvExpandExec.target().id(), mvExpandExec.expanded().id()); return source.with( new MvExpandOperator.Factory(source.layout.get(mvExpandExec.target().id()).channel(), blockSize), layout.build() From 26905047fb62ba2932ccd85f0e958bc8b9205a5b Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 5 Dec 2023 09:30:50 +0100 Subject: [PATCH 201/263] Rework minio test fixture and its usages (#102707) * Update minio fixture and usages to rely on new test cluster framework and testcontainer * Cache test fixture resources during packer caching Add logic that ensures we resolve docker images resolved from docker registry as part of our packer cache setup. 
--- build-tools-internal/build.gradle | 6 + .../internal/RestrictedBuildApiService.java | 2 - .../packer/CacheCacheableTestFixtures.java | 115 ++++++++++++++++++ .../CacheTestFixtureResourcesPlugin.java | 45 +++++++ build-tools-internal/version.properties | 4 + gradle/build.versions.toml | 1 + gradle/verification-metadata.xml | 5 + test/fixtures/geoip-fixture/build.gradle | 2 - test/fixtures/minio-fixture/Dockerfile | 9 -- test/fixtures/minio-fixture/build.gradle | 8 +- .../fixtures/minio-fixture/docker-compose.yml | 35 ------ .../test/fixtures/CacheableTestFixture.java | 13 ++ .../fixtures/minio/MinioTestContainer.java | 15 ++- .../qa/minio/build.gradle | 41 ++----- .../minio/MinioSearchableSnapshotsIT.java | 45 +++++-- .../qa/hdfs/build.gradle | 2 +- .../qa/minio/build.gradle | 30 +---- .../testkit/MinioSnapshotRepoTestKitIT.java | 42 +++++-- 18 files changed, 290 insertions(+), 130 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java delete mode 100644 test/fixtures/minio-fixture/Dockerfile delete mode 100644 test/fixtures/minio-fixture/docker-compose.yml create mode 100644 test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 66001e66f2486..738b3f62803ab 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -195,6 +195,10 @@ gradlePlugin { id = 'elasticsearch.legacy-yaml-rest-test' implementationClass = 'org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin' } + cacheTestFixtures { + id = 'elasticsearch.cache-test-fixtures' + implementationClass = 'org.elasticsearch.gradle.internal.packer.CacheTestFixtureResourcesPlugin' + } yamlRestTest { id = 
'elasticsearch.internal-yaml-rest-test' implementationClass = 'org.elasticsearch.gradle.internal.test.rest.InternalYamlRestTestPlugin' @@ -288,6 +292,8 @@ dependencies { api buildLibs.httpcore compileOnly buildLibs.checkstyle + compileOnly buildLibs.reflections + runtimeOnly "org.elasticsearch.gradle:reaper:$version" testImplementation buildLibs.checkstyle testImplementation buildLibs.wiremock diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java index 2d5dc65a43fae..8042bdd64dabb 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java @@ -139,7 +139,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:native-multi-node-tests"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:ml:qa:single-node-tests"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:hdfs"); - map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:minio"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:rest"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:searchable-snapshots:qa:url"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:security:qa:tls-basic"); @@ -150,7 +149,6 @@ private static ListMultimap, String> createLegacyRestTestBasePluginUsag map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-based-recoveries:qa:fs"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-based-recoveries:qa:license-enforcing"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-repo-test-kit:qa:hdfs"); - map.put(LegacyRestTestBasePlugin.class, 
":x-pack:plugin:snapshot-repo-test-kit:qa:minio"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:snapshot-repo-test-kit:qa:rest"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:sql:qa:jdbc:multi-node"); map.put(LegacyRestTestBasePlugin.class, ":x-pack:plugin:sql:qa:jdbc:no-sql"); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java new file mode 100644 index 0000000000000..a01b1c28a851f --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.packer; + +import org.gradle.api.DefaultTask; +import org.gradle.api.InvalidUserDataException; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.FileCollection; +import org.gradle.api.tasks.CompileClasspath; +import org.gradle.api.tasks.TaskAction; +import org.gradle.workers.WorkAction; +import org.gradle.workers.WorkParameters; +import org.gradle.workers.WorkQueue; +import org.gradle.workers.WorkerExecutor; +import org.reflections.Reflections; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.util.LinkedHashSet; +import java.util.Set; + +import javax.inject.Inject; + +public abstract class CacheCacheableTestFixtures extends DefaultTask { + + @CompileClasspath + public abstract ConfigurableFileCollection getClasspath(); + + @Inject + public abstract WorkerExecutor getWorkerExecutor(); + + /** + * Executes the forbidden apis task. 
+ */ + @TaskAction + public void checkForbidden() { + WorkQueue workQueue = getWorkerExecutor().classLoaderIsolation(spec -> spec.getClasspath().from(getClasspath())); + workQueue.submit(CacheTestFixtureWorkAction.class, params -> params.getClasspath().setFrom(getClasspath())); + } + + interface Parameters extends WorkParameters { + ConfigurableFileCollection getClasspath(); + } + + abstract static class CacheTestFixtureWorkAction implements WorkAction { + + @Inject + @SuppressWarnings("checkstyle:RedundantModifier") + public CacheTestFixtureWorkAction() {} + + @Override + public void execute() { + final URLClassLoader urlLoader = createClassLoader(getParameters().getClasspath()); + try { + Reflections reflections = new Reflections( + new ConfigurationBuilder().setUrls(ClasspathHelper.forPackage("org.elasticsearch.test.fixtures")) + .setScanners(new SubTypesScanner()) + ); + + Class ifClass = Class.forName("org.elasticsearch.test.fixtures.CacheableTestFixture"); + Set> classes = (Set>) reflections.getSubTypesOf(ifClass); + + for (Class cacheableTestFixtureClazz : classes) { + Object o = cacheableTestFixtureClazz.getDeclaredConstructor().newInstance(); + Method cacheMethod = cacheableTestFixtureClazz.getMethod("cache"); + cacheMethod.invoke(o); + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + // Close the classloader to free resources: + try { + if (urlLoader != null) urlLoader.close(); + } catch (IOException ioe) { + // getLogger().warn("Cannot close classloader: ".concat(ioe.toString())); + } + } + } + + private URLClassLoader createClassLoader(FileCollection classpath) { + if (classpath == null) { + throw new InvalidUserDataException("Missing 'classesDirs' or 'classpath' property."); + } + + final Set cpElements = new LinkedHashSet<>(); + cpElements.addAll(classpath.getFiles()); + final URL[] urls = new URL[cpElements.size()]; + try { + int i = 0; + for (final File cpElement : cpElements) { + urls[i++] = cpElement.toURI().toURL(); + 
} + assert i == urls.length; + } catch (MalformedURLException mfue) { + throw new InvalidUserDataException("Failed to build classpath URLs.", mfue); + } + + return URLClassLoader.newInstance(urls, ClassLoader.getSystemClassLoader()); + } + + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java new file mode 100644 index 0000000000000..f8ab8eef1004c --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.packer; + +import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.internal.ResolveAllDependencies; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.artifacts.dsl.DependencyHandler; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginExtension; + +public class CacheTestFixtureResourcesPlugin implements Plugin { + + public static final String CACHE_TEST_FIXTURES = "cacheTestFixtures"; + + @Override + public void apply(Project project) { + + var cacheTestFixturesConfiguration = project.getConfigurations().create(CACHE_TEST_FIXTURES); + cacheTestFixturesConfiguration.defaultDependencies(deps -> { + DependencyHandler dependencyHandler = project.getDependencies(); + deps.add(dependencyHandler.create("org.reflections:reflections:" + VersionProperties.getVersions().get("reflections"))); + deps.add(dependencyHandler.create("org.javassist:javassist:" + VersionProperties.getVersions().get("javassist"))); + }); + project.getPlugins().withType(JavaPlugin.class, javaPlugin -> { + var cacheTestFixtures = project.getTasks().register(CACHE_TEST_FIXTURES, CacheCacheableTestFixtures.class, (t) -> { + var testSourceSet = project.getExtensions() + .getByType(JavaPluginExtension.class) + .getSourceSets() + .getByName(JavaPlugin.TEST_TASK_NAME); + t.getClasspath().from(cacheTestFixturesConfiguration); + t.getClasspath().from(testSourceSet.getRuntimeClasspath()); + }); + project.getTasks().withType(ResolveAllDependencies.class).configureEach(r -> r.dependsOn(cacheTestFixtures)); + }); + + } +} diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 9763cef8aefeb..f0e599a9c0e87 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -47,6 +47,10 @@ dockerJava = 3.3.4 ductTape = 1.0.8 commonsCompress = 1.24.0 +# packer caching build logic +reflections = 
0.9.12 +javassist = 3.28.0-GA + # benchmark dependencies jmh = 1.26 diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index e8d94ce624dbb..f1965fc5400ea 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -37,6 +37,7 @@ junit5-vintage = { group = "org.junit.vintage", name="junit-vintage-engine", ver maven-model = "org.apache.maven:maven-model:3.6.2" mockito-core = "org.mockito:mockito-core:1.9.5" nebula-info = "com.netflix.nebula:gradle-info-plugin:11.3.3" +reflections = "org.reflections:reflections:0.9.12" shadow-plugin = "com.github.johnrengelman:shadow:8.1.1" spock-core = { group = "org.spockframework", name="spock-core", version.ref="spock" } spock-junit4 = { group = "org.spockframework", name="spock-junit4", version.ref="spock" } diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 9d383c426cb74..7f672ece21f66 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -4092,6 +4092,11 @@ + + + + + diff --git a/test/fixtures/geoip-fixture/build.gradle b/test/fixtures/geoip-fixture/build.gradle index 377deb5e8a872..a7805c68b08e9 100644 --- a/test/fixtures/geoip-fixture/build.gradle +++ b/test/fixtures/geoip-fixture/build.gradle @@ -18,5 +18,3 @@ dependencies { exclude module: 'hamcrest-core' } } - -tasks.named("test") { enabled = false } diff --git a/test/fixtures/minio-fixture/Dockerfile b/test/fixtures/minio-fixture/Dockerfile deleted file mode 100644 index 6a94355951683..0000000000000 --- a/test/fixtures/minio-fixture/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM minio/minio:RELEASE.2021-03-01T04-20-55Z - -ARG bucket -ARG accessKey -ARG secretKey - -RUN mkdir -p /minio/data/${bucket} -ENV MINIO_ACCESS_KEY=${accessKey} -ENV MINIO_SECRET_KEY=${secretKey} diff --git a/test/fixtures/minio-fixture/build.gradle b/test/fixtures/minio-fixture/build.gradle index 8673c51d46038..66613809068f7 100644 --- a/test/fixtures/minio-fixture/build.gradle +++ 
b/test/fixtures/minio-fixture/build.gradle @@ -5,9 +5,9 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ -apply plugin: 'elasticsearch.test.fixtures' apply plugin: 'java' apply plugin: 'elasticsearch.java' +apply plugin: 'elasticsearch.cache-test-fixtures' description = 'Fixture for MinIO Storage service' @@ -23,10 +23,14 @@ dependencies { implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" implementation "org.slf4j:slf4j-api:${versions.slf4j}" implementation "com.github.docker-java:docker-java-api:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" - runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + + // ensure we have proper logging during when used in tests + runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" } diff --git a/test/fixtures/minio-fixture/docker-compose.yml b/test/fixtures/minio-fixture/docker-compose.yml deleted file mode 100644 index c65ed2f070703..0000000000000 --- a/test/fixtures/minio-fixture/docker-compose.yml +++ /dev/null @@ -1,35 +0,0 @@ -version: '3' -services: - minio-fixture: - build: - context: . - args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] - minio-fixture-other: - build: - context: . 
- args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] - minio-fixture-repository-test-kit: - build: - context: . - args: - bucket: "bucket" - accessKey: "s3_test_access_key" - secretKey: "s3_test_secret_key" - dockerfile: Dockerfile - ports: - - "9000" - command: ["server", "/minio/data"] diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java new file mode 100644 index 0000000000000..e824cd612c1b4 --- /dev/null +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test.fixtures; + +public interface CacheableTestFixture { + void cache(); +} diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index fcb95890ace31..a7e6ba8d785a1 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -8,13 +8,15 @@ package org.elasticsearch.test.fixtures.minio; +import org.elasticsearch.test.fixtures.CacheableTestFixture; import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; import org.junit.rules.TestRule; import org.testcontainers.images.builder.ImageFromDockerfile; -public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer implements TestRule { +public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer implements TestRule, CacheableTestFixture { private static final int servicePort = 9000; + public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2021-03-01T04-20-55Z"; private final boolean enabled; public MinioTestContainer() { @@ -24,7 +26,7 @@ public MinioTestContainer() { public MinioTestContainer(boolean enabled) { super( new ImageFromDockerfile().withDockerfileFromBuilder( - builder -> builder.from("minio/minio:RELEASE.2021-03-01T04-20-55Z") + builder -> builder.from(DOCKER_BASE_IMAGE) .env("MINIO_ACCESS_KEY", "s3_test_access_key") .env("MINIO_SECRET_KEY", "s3_test_secret_key") .run("mkdir -p /minio/data/bucket") @@ -48,4 +50,13 @@ public void start() { public String getAddress() { return "http://127.0.0.1:" + getMappedPort(servicePort); } + + public void cache() { + try { + start(); + stop(); + } catch (RuntimeException e) { + logger().warn("Error while caching 
container images.", e); + } + } } diff --git a/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle b/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle index 860e42378dcd9..2d6b6e80d4bdf 100644 --- a/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/minio/build.gradle @@ -1,13 +1,17 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -final Project fixture = project(':test:fixtures:minio-fixture') - dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) + javaRestTestImplementation project(':test:test-clusters') + javaRestTestImplementation project(":test:framework") + javaRestTestImplementation project(':test:fixtures:minio-fixture') +} + +tasks.named('javaRestTest') { + usesDefaultDistribution() } restResources { @@ -15,32 +19,3 @@ restResources { include 'indices', 'search', 'bulk', 'snapshot', 'nodes', '_common', 'searchable_snapshots' } } - -testFixtures.useFixture(fixture.path, 'minio-fixture-other') -def fixtureAddress = { - int ephemeralPort = fixture.postProcessFixture.ext."test.fixtures.minio-fixture-other.tcp.9000" - assert ephemeralPort > 0 - '127.0.0.1:' + ephemeralPort -} - -tasks.named("javaRestTest").configure { - systemProperty 'test.minio.bucket', 'bucket' - systemProperty 'test.minio.base_path', 'searchable_snapshots_tests' -} - -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - - keystore 's3.client.searchable_snapshots.access_key', 's3_test_access_key' - keystore 's3.client.searchable_snapshots.secret_key', 's3_test_secret_key' - setting 'xpack.license.self_generated.type', 'trial' - setting 's3.client.searchable_snapshots.protocol', 'http' - setting 
's3.client.searchable_snapshots.endpoint', { "${-> fixtureAddress()}" }, IGNORE_VALUE - - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - setting 'xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive', '0ms' - - setting 'xpack.security.enabled', 'false' -} - diff --git a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java index eded3824d4f63..5c2b19fe75a07 100644 --- a/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/minio/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/minio/MinioSearchableSnapshotsIT.java @@ -6,27 +6,54 @@ */ package org.elasticsearch.xpack.searchablesnapshots.minio; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.minio.MinioTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xpack.searchablesnapshots.AbstractSearchableSnapshotsRestTestCase; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; - +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + public static final 
MinioTestContainer minioFixture = new MinioTestContainer(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .keystore("s3.client.searchable_snapshots.access_key", "s3_test_access_key") + .keystore("s3.client.searchable_snapshots.secret_key", "s3_test_secret_key") + .setting("xpack.license.self_generated.type", "trial") + .setting("s3.client.searchable_snapshots.protocol", () -> "http") + .setting("s3.client.searchable_snapshots.endpoint", minioFixture::getAddress) + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .setting("xpack.ml.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(minioFixture).around(cluster); + @Override protected String writeRepositoryType() { return "s3"; } @Override - protected Settings writeRepositorySettings() { - final String bucket = System.getProperty("test.minio.bucket"); - assertThat(bucket, not(blankOrNullString())); - - final String basePath = System.getProperty("test.minio.base_path"); - assertThat(basePath, not(blankOrNullString())); + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override + protected Settings writeRepositorySettings() { + final String bucket = "bucket"; + final String basePath = "searchable_snapshots_tests"; return Settings.builder().put("client", "searchable_snapshots").put("bucket", bucket).put("base_path", basePath).build(); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle index eafeb22106ff8..90a6f4ada32e0 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle @@ 
-27,7 +27,7 @@ final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs') dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) - javaRestTestImplementation hdfsRepoPluginProject + javaRestTestImplementation project(':plugins:repository-hdfs') } restResources { diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle index 225e0146a6ecb..6e8993a7a141d 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/build.gradle @@ -7,14 +7,12 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.legacy-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -final Project fixture = project(':test:fixtures:minio-fixture') - dependencies { javaRestTestImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit'))) + javaRestTestImplementation project(':test:fixtures:minio-fixture') } restResources { @@ -23,26 +21,6 @@ restResources { } } -testFixtures.useFixture(fixture.path, 'minio-fixture-repository-test-kit') -def fixtureAddress = { - int ephemeralPort = fixture.postProcessFixture.ext."test.fixtures.minio-fixture-repository-test-kit.tcp.9000" - assert ephemeralPort > 0 - '127.0.0.1:' + ephemeralPort -} - -tasks.named("javaRestTest").configure { - systemProperty 'test.minio.bucket', 'bucket' - systemProperty 'test.minio.base_path', 'repository_test_kit_tests' -} - -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - - keystore 's3.client.repository_test_kit.access_key', 's3_test_access_key' - keystore 's3.client.repository_test_kit.secret_key', 's3_test_secret_key' - setting 's3.client.repository_test_kit.protocol', 'http' - setting 
's3.client.repository_test_kit.endpoint', { "${-> fixtureAddress()}" }, IGNORE_VALUE - - setting 'xpack.security.enabled', 'false' +tasks.named('javaRestTest') { + usesDefaultDistribution() } - diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java index 16dc843690d05..3e58a8d89ff31 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/minio/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/MinioSnapshotRepoTestKitIT.java @@ -6,26 +6,50 @@ */ package org.elasticsearch.repositories.blobstore.testkit; -import org.elasticsearch.common.settings.Settings; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; -import static org.hamcrest.Matchers.blankOrNullString; -import static org.hamcrest.Matchers.not; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.fixtures.minio.MinioTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class MinioSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase { + public static final MinioTestContainer minioFixture = new MinioTestContainer(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + 
.keystore("s3.client.repository_test_kit.access_key", "s3_test_access_key") + .keystore("s3.client.repository_test_kit.secret_key", "s3_test_secret_key") + .setting("s3.client.repository_test_kit.protocol", () -> "http") + .setting("s3.client.repository_test_kit.endpoint", minioFixture::getAddress) + .setting("xpack.security.enabled", "false") + // Additional tracing related to investigation into https://github.com/elastic/elasticsearch/issues/102294 + .setting("xpack.ml.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(minioFixture).around(cluster); + @Override protected String repositoryType() { return "s3"; } @Override - protected Settings repositorySettings() { - final String bucket = System.getProperty("test.minio.bucket"); - assertThat(bucket, not(blankOrNullString())); - - final String basePath = System.getProperty("test.minio.base_path"); - assertThat(basePath, not(blankOrNullString())); + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + @Override + protected Settings repositorySettings() { + final String bucket = "bucket"; + final String basePath = "repository_test_kit_tests"; return Settings.builder().put("client", "repository_test_kit").put("bucket", bucket).put("base_path", basePath).build(); } } From 86b80a36c431031f880f43d279a38bd936fd2e51 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 5 Dec 2023 09:16:46 +0000 Subject: [PATCH 202/263] Enable retry on rate limit error from OpenAI(#102922) Extract token and request usage from the OpenAI headers --- .../http/retry/BaseResponseHandler.java | 2 +- .../openai/OpenAiResponseHandler.java | 42 ++++++++- .../openai/OpenAiResponseHandlerTests.java | 89 +++++++++++++++++-- 3 files changed, 124 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index 31d987118c28d..4e36d6a59a5e6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -79,7 +79,7 @@ protected Exception buildError(String message, HttpRequestBase request, HttpResu ); } - static RestStatus toRestStatus(int statusCode) { + public static RestStatus toRestStatus(int statusCode) { RestStatus code = null; if (statusCode < 500) { code = RestStatus.fromCode(statusCode); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 7609b734db4f5..43c234a6809c4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; @@ -22,6 +24,17 @@ import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; public class OpenAiResponseHandler extends BaseResponseHandler { + /** + * Rate limit headers taken from https://platform.openai.com/docs/guides/rate-limits/rate-limits-in-headers + */ + // The maximum number of requests that 
are permitted before exhausting the rate limit. + static final String REQUESTS_LIMIT = "x-ratelimit-limit-requests"; + // The maximum number of tokens that are permitted before exhausting the rate limit. + static final String TOKENS_LIMIT = "x-ratelimit-limit-tokens"; + // The remaining number of requests that are permitted before exhausting the rate limit. + static final String REMAINING_REQUESTS = "x-ratelimit-remaining-requests"; + // The remaining number of tokens that are permitted before exhausting the rate limit. + static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); @@ -52,7 +65,7 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw if (statusCode >= 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if (statusCode == 429) { - throw new RetryException(false, buildError(RATE_LIMIT, request, result)); // TODO back off and retry + throw new RetryException(true, buildError(buildRateLimitErrorMessage(request, result), request, result)); } else if (statusCode == 401) { throw new RetryException(false, buildError(AUTHENTICATION, request, result)); } else if (statusCode >= 300 && statusCode < 400) { @@ -61,4 +74,31 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw throw new RetryException(false, buildError(UNSUCCESSFUL, request, result)); } } + + static String buildRateLimitErrorMessage(HttpRequestBase request, HttpResult result) { + var response = result.response(); + int statusCode = result.response().getStatusLine().getStatusCode(); + var tokenLimit = getFirstHeaderOrUnknown(response, TOKENS_LIMIT); + var remainingTokens = getFirstHeaderOrUnknown(response, REMAINING_TOKENS); + var requestLimit = getFirstHeaderOrUnknown(response, REQUESTS_LIMIT); + var remainingRequests = 
getFirstHeaderOrUnknown(response, REMAINING_REQUESTS); + + var usageMessage = Strings.format( + "Token limit [%s], remaining tokens [%s]. Request limit [%s], remaining requests [%s]", + tokenLimit, + remainingTokens, + requestLimit, + remainingRequests + ); + + return RATE_LIMIT + ". " + usageMessage; + } + + private static String getFirstHeaderOrUnknown(HttpResponse response, String name) { + var header = response.getFirstHeader(name); + if (header != null && header.getElements().length > 0) { + return header.getElements()[0].getName(); + } + return "unknown"; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java index 56495b053e172..e3698701ac1f0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -7,9 +7,13 @@ package org.elasticsearch.xpack.inference.external.openai; +import org.apache.http.Header; +import org.apache.http.HeaderElement; import org.apache.http.HttpResponse; +import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; @@ -18,6 +22,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.Is.is; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -28,11 +33,12 @@ public void testCheckForFailureStatusCode() { var httpResponse = mock(HttpResponse.class); 
when(httpResponse.getStatusLine()).thenReturn(statusLine); + var header = mock(Header.class); + when(header.getElements()).thenReturn(new HeaderElement[] {}); + when(httpResponse.getFirstHeader(anyString())).thenReturn(header); var httpRequest = mock(HttpRequestBase.class); - var httpResult = new HttpResult(httpResponse, new byte[] {}); - var handler = new OpenAiResponseHandler("", result -> null); // 200 ok @@ -50,11 +56,8 @@ public void testCheckForFailureStatusCode() { // 429 when(statusLine.getStatusCode()).thenReturn(429); retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); - assertFalse(retryException.shouldRetry()); - assertThat( - retryException.getCause().getMessage(), - containsString("Received a rate limit status code for request [null] status [429]") - ); + assertTrue(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Received a rate limit status code. Token limit")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); // 401 when(statusLine.getStatusCode()).thenReturn(401); @@ -81,4 +84,76 @@ public void testCheckForFailureStatusCode() { ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.PAYMENT_REQUIRED)); } + + public void testBuildRateLimitErrorMessage() { + int statusCode = 429; + var statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + var requestLine = mock(RequestLine.class); + var response = mock(HttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine); + var request = mock(HttpRequestBase.class); + var httpResult = new HttpResult(response, new byte[] {}); + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REQUESTS_LIMIT, "3000") + ); + 
when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.TOKENS_LIMIT, "10000") + ); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_TOKENS, "99800") + ); + + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [10000], remaining tokens [99800]. Request limit [3000], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [unknown], remaining tokens [unknown]. Request limit [3000], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [unknown], remaining tokens [unknown]. 
Request limit [unknown], remaining requests [2999]") + ); + } + + { + when(response.getFirstHeader(OpenAiResponseHandler.REQUESTS_LIMIT)).thenReturn(null); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_REQUESTS)).thenReturn( + new BasicHeader(OpenAiResponseHandler.REMAINING_REQUESTS, "2999") + ); + when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn( + new BasicHeader(OpenAiResponseHandler.TOKENS_LIMIT, "10000") + ); + when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + assertThat( + error, + containsString("Token limit [10000], remaining tokens [unknown]. Request limit [unknown], remaining requests [2999]") + ); + } + } } From bbe9f115cc59bb7b7bfcc8d89cde1ab015427cc2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 5 Dec 2023 10:18:49 +0100 Subject: [PATCH 203/263] More version removal/mitigation (#102867) --- .../upgrades/FullClusterRestartIT.java | 59 +++++++++++++------ ...rameterizedFullClusterRestartTestCase.java | 14 +++-- .../upgrades/QueryBuilderBWCIT.java | 16 +++-- .../metadata/DataStreamTestHelper.java | 4 +- .../test/rest/RestTestLegacyFeatures.java | 35 ++++++++++- .../xpack/restart/FullClusterRestartIT.java | 6 +- .../xpack/restart/FullClusterRestartIT.java | 48 ++++++++++----- ...MLModelDeploymentFullClusterRestartIT.java | 2 +- ...nfigIndexMappingsFullClusterRestartIT.java | 6 +- .../MlHiddenIndicesFullClusterRestartIT.java | 6 +- .../MlMigrationFullClusterRestartIT.java | 4 +- .../xpack/restart/WatcherMappingUpdateIT.java | 12 ++-- 12 files changed, 150 insertions(+), 62 deletions(-) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index e5bc4a729f8b1..db2904a53dd11 100644 --- 
a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; +import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.settings.RestClusterGetSettingsResponse; import org.elasticsearch.client.Request; @@ -25,6 +26,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -268,7 +270,10 @@ public void testNewReplicas() throws Exception { } public void testSearchTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + + var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) + .orElse(true); + assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); int numDocs; if (isRunningAgainstOldCluster()) { numDocs = createTimeSeriesModeIndex(1); @@ -310,7 +315,9 @@ public void testSearchTimeSeriesMode() throws Exception { } public void testNewReplicasTimeSeriesMode() throws Exception { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + var originalClusterHasNewTimeSeriesIndexing = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_2_0)) + .orElse(true); + assumeTrue("indexing time series indices changed in 8.2.0", originalClusterHasNewTimeSeriesIndexing); if 
(isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { @@ -995,7 +1002,7 @@ public void testSnapshotRestore() throws IOException { { templateBuilder.startObject("term"); { - templateBuilder.field("version", isRunningAgainstOldCluster() ? getOldClusterVersion() : Version.CURRENT); + templateBuilder.field("version", isRunningAgainstOldCluster() ? getOldClusterVersion() : Build.current().version()); } templateBuilder.endObject(); } @@ -1035,7 +1042,7 @@ public void testSnapshotRestore() throws IOException { checkSnapshot("old_snap", count, getOldClusterVersion(), getOldClusterIndexVersion()); if (false == isRunningAgainstOldCluster()) { - checkSnapshot("new_snap", count, Version.CURRENT, IndexVersion.current()); + checkSnapshot("new_snap", count, Build.current().version(), IndexVersion.current()); } } @@ -1158,7 +1165,12 @@ public void testClosedIndices() throws Exception { closeIndex(index); } - if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) { + @UpdateForV9 // This check can be removed (always assume true) + var originalClusterSupportsReplicationOfClosedIndices = parseLegacyVersion(getOldClusterVersion()).map( + v -> v.onOrAfter(Version.V_7_2_0) + ).orElse(true); + + if (originalClusterSupportsReplicationOfClosedIndices) { ensureGreenLongWait(index); assertClosedIndex(index, true); } else { @@ -1225,7 +1237,7 @@ private void assertClosedIndex(final String indexName, final boolean checkRoutin } @SuppressWarnings("unchecked") - private void checkSnapshot(String snapshotName, int count, Version tookOnVersion, IndexVersion tookOnIndexVersion) throws IOException { + private void checkSnapshot(String snapshotName, int count, String tookOnVersion, IndexVersion tookOnIndexVersion) throws IOException { // Check the snapshot metadata, especially the version Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName); Map snapResponse = entityAsMap(client().performRequest(listSnapshotRequest)); @@ -1235,7 +1247,7 @@ private void 
checkSnapshot(String snapshotName, int count, Version tookOnVersion // the format can change depending on the ES node version running & this test code running assertThat( XContentMapValues.extractValue("snapshots.version", snapResponse), - either(Matchers.equalTo(List.of(tookOnVersion.toString()))).or(equalTo(List.of(tookOnIndexVersion.toString()))) + either(Matchers.equalTo(List.of(tookOnVersion))).or(equalTo(List.of(tookOnIndexVersion.toString()))) ); // Remove the routing setting and template so we can test restoring them. @@ -1295,7 +1307,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion Map clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest)); @SuppressWarnings("unchecked") final Map persistentSettings = (Map) clusterSettingsResponse.get("persistent"); - assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion().toString())); + assertThat(persistentSettings.get("cluster.routing.allocation.exclude.test_attr"), equalTo(getOldClusterVersion())); // Check that the template was restored successfully Request getTemplateRequest = new Request("GET", "/_template/test_template"); @@ -1310,14 +1322,14 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion expectedTemplate.put("order", 0); Map aliases = new HashMap<>(); aliases.put("alias1", emptyMap()); - aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString())))); + aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion)))); expectedTemplate.put("aliases", aliases); expectedTemplate = singletonMap("test_template", expectedTemplate); if (false == expectedTemplate.equals(getTemplateResponse)) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); builder.compareMaps(getTemplateResponse, expectedTemplate); logger.info("expected: {}\nactual:{}", 
expectedTemplate, getTemplateResponse); - fail("template doesn't match:\n" + builder.toString()); + fail("template doesn't match:\n" + builder); } } @@ -1561,7 +1573,12 @@ public void testResize() throws Exception { @SuppressWarnings("unchecked") public void testSystemIndexMetadataIsUpgraded() throws Exception { - assumeTrue(".tasks became a system index in 7.10.0", getOldClusterVersion().onOrAfter(Version.V_7_10_0)); + + @UpdateForV9 // assumeTrue can be removed (condition always true) + var originalClusterTaskIndexIsSystemIndex = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_10_0)) + .orElse(true); + + assumeTrue(".tasks became a system index in 7.10.0", originalClusterTaskIndexIsSystemIndex); final String systemIndexWarning = "this request accesses system indices: [.tasks], but in a future major version, direct " + "access to system indices will be prevented by default"; if (isRunningAgainstOldCluster()) { @@ -1669,8 +1686,15 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception { } } + /** + * This test ensures that soft deletes are enabled a when upgrading a pre-8 cluster to 8.0+ + */ + @UpdateForV9 // This test can be removed in v9 public void testEnableSoftDeletesOnRestore() throws Exception { - assumeTrue("soft deletes must be enabled on 8.0+", getOldClusterVersion().before(Version.V_8_0_0)); + var originalClusterDidNotEnforceSoftDeletes = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_8_0_0)) + .orElse(false); + + assumeTrue("soft deletes must be enabled on 8.0+", originalClusterDidNotEnforceSoftDeletes); final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { final Settings.Builder settings = indexSettings(1, 1); @@ -1783,16 +1807,15 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception { } /** - * In 7.14 the cluster.remote.*.transport.compress setting was change from a boolean to an enum setting + * In 7.14 the 
cluster.remote.*.transport.compress setting was changed from a boolean to an enum setting * with true/false as options. This test ensures that the old boolean setting in cluster state is * translated properly. This test can be removed in 9.0. */ + @UpdateForV9 public void testTransportCompressionSetting() throws IOException { - assumeTrue("the old transport.compress setting existed before 7.14", getOldClusterVersion().before(Version.V_7_14_0)); - assumeTrue( - "Early versions of 6.x do not have cluster.remote* prefixed settings", - getOldClusterVersion().onOrAfter(Version.fromString("6.8.0")) - ); + var originalClusterCompressSettingIsBoolean = parseLegacyVersion(getOldClusterVersion()).map(v -> v.before(Version.V_7_14_0)) + .orElse(false); + assumeTrue("the old transport.compress setting existed before 7.14", originalClusterCompressSettingIsBoolean); if (isRunningAgainstOldCluster()) { final Request putSettingsRequest = new Request("PUT", "/_cluster/settings"); try (XContentBuilder builder = jsonBuilder()) { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index eef8f62eedd98..05a2892717697 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; @@ -34,7 +35,7 @@ @TestCaseOrdering(FullClusterRestartTestOrdering.class) 
public abstract class ParameterizedFullClusterRestartTestCase extends ESRestTestCase { private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString("7.17.0"); - private static final Version OLD_CLUSTER_VERSION = Version.fromString(System.getProperty("tests.old_cluster_version")); + private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; private static boolean upgraded = false; @@ -66,7 +67,8 @@ public void extractOldIndexVersion() throws Exception { version = IndexVersion.fromId(ix.intValue()); } else { // it doesn't have index version (pre 8.11) - just infer it from the release version - version = IndexVersion.fromId(getOldClusterVersion().id); + version = parseLegacyVersion(OLD_CLUSTER_VERSION).map(x -> IndexVersion.fromId(x.id())) + .orElse(IndexVersions.MINIMUM_COMPATIBLE); } if (indexVersion == null) { @@ -86,7 +88,7 @@ public void extractOldIndexVersion() throws Exception { public void maybeUpgrade() throws Exception { if (upgraded == false && requestedUpgradeStatus == UPGRADED) { try { - if (OLD_CLUSTER_VERSION.before(MINIMUM_WIRE_COMPATIBLE_VERSION)) { + if (getOldClusterTestVersion().before(MINIMUM_WIRE_COMPATIBLE_VERSION)) { // First upgrade to latest wire compatible version getUpgradeCluster().upgradeToVersion(MINIMUM_WIRE_COMPATIBLE_VERSION); } @@ -115,8 +117,8 @@ public boolean isRunningAgainstOldCluster() { return requestedUpgradeStatus == OLD; } - public static org.elasticsearch.Version getOldClusterVersion() { - return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION.toString()); + public static String getOldClusterVersion() { + return OLD_CLUSTER_VERSION; } public static IndexVersion getOldClusterIndexVersion() { @@ -125,7 +127,7 @@ public static IndexVersion getOldClusterIndexVersion() { } public static Version getOldClusterTestVersion() { - return 
Version.fromString(OLD_CLUSTER_VERSION.toString()); + return Version.fromString(OLD_CLUSTER_VERSION); } protected abstract ElasticsearchCluster getUpgradeCluster(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index e2f70db0f69ba..3b58cf932fa61 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -21,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; import org.elasticsearch.index.query.DisMaxQueryBuilder; @@ -248,11 +250,17 @@ public void testQueryBuilderBWC() throws Exception { InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length); StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - Version clusterVersion = getOldClusterVersion(); - TransportVersion transportVersion; - if (clusterVersion.before(Version.V_8_8_0)) { - transportVersion = TransportVersion.fromId(clusterVersion.id); + @UpdateForV9 // always true + var originalClusterHasTransportVersion = parseLegacyVersion(getOldClusterVersion()).map( + v -> v.onOrAfter(Version.V_8_8_0) + ).orElse(true); + + final TransportVersion transportVersion; + if 
(originalClusterHasTransportVersion == false) { + transportVersion = TransportVersion.fromId( + parseLegacyVersion(getOldClusterVersion()).map(Version::id).orElse(TransportVersions.MINIMUM_COMPATIBLE.id()) + ); } else { transportVersion = TransportVersion.readVersion(input); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 9017e88f430b5..e3e11907534e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -120,9 +120,9 @@ public static String getLegacyDefaultBackingIndexName( String dataStreamName, long generation, long epochMillis, - Version minNodeVersion + boolean isNewIndexNameFormat ) { - if (minNodeVersion.onOrAfter(DATE_IN_BACKING_INDEX_VERSION)) { + if (isNewIndexNameFormat) { return String.format( Locale.ROOT, BACKING_INDEX_PREFIX + "%s-%s-%06d", diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index bd19757bac438..1530809a064b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -9,6 +9,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -23,10 +24,16 @@ */ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled"); + @UpdateForV9 public static final NodeFeature 
FEATURE_STATE_RESET_SUPPORTED = new NodeFeature("system_indices.feature_state_reset_supported"); public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_ENFORCED = new NodeFeature("system_indices.rest_access_enforced"); + @UpdateForV9 + public static final NodeFeature SYSTEM_INDICES_REST_ACCESS_DEPRECATED = new NodeFeature("system_indices.rest_access_deprecated"); + @UpdateForV9 public static final NodeFeature HIDDEN_INDICES_SUPPORTED = new NodeFeature("indices.hidden_supported"); + @UpdateForV9 public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); + @UpdateForV9 public static final NodeFeature DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED = new NodeFeature( "indices.delete_template_multiple_names_supported" ); @@ -34,18 +41,44 @@ public class RestTestLegacyFeatures implements FeatureSpecification { // QA - rolling upgrade tests public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); + @UpdateForV9 + + public static final NodeFeature WATCHES_VERSION_IN_META = new NodeFeature("watcher.version_in_meta"); + @UpdateForV9 + public static final NodeFeature SECURITY_ROLE_DESCRIPTORS_OPTIONAL = new NodeFeature("security.role_descriptors_optional"); + @UpdateForV9 + public static final NodeFeature SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM = new NodeFeature( + "search.aggregations.force_interval_selection_on_date_histogram" + ); + @UpdateForV9 + public static final NodeFeature TRANSFORM_NEW_API_ENDPOINT = new NodeFeature("transform.new_api_endpoint"); + // Ref: https://github.com/elastic/elasticsearch/pull/65205 + @UpdateForV9 + public static final NodeFeature DATA_STREAMS_DATE_IN_INDEX_NAME = new NodeFeature("data-streams.date_in_index_name"); + @UpdateForV9 + public static final NodeFeature ML_INDICES_HIDDEN = new 
NodeFeature("ml.indices_hidden"); + @UpdateForV9 + public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); @Override public Map getHistoricalFeatures() { return Map.ofEntries( entry(FEATURE_STATE_RESET_SUPPORTED, Version.V_7_13_0), entry(SYSTEM_INDICES_REST_ACCESS_ENFORCED, Version.V_8_0_0), + entry(SYSTEM_INDICES_REST_ACCESS_DEPRECATED, Version.V_7_10_0), entry(HIDDEN_INDICES_SUPPORTED, Version.V_7_7_0), entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), entry(DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED, Version.V_7_13_0), entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), - entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0) + entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), + entry(WATCHES_VERSION_IN_META, Version.V_7_13_0), + entry(SECURITY_ROLE_DESCRIPTORS_OPTIONAL, Version.V_7_3_0), + entry(SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM, Version.V_7_2_0), + entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), + entry(DATA_STREAMS_DATE_IN_INDEX_NAME, Version.V_7_11_0), + entry(ML_INDICES_HIDDEN, Version.V_7_7_0), + entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0) ); } } diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 07ed594770649..7a90907b9cf39 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import 
org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -88,7 +89,10 @@ protected Settings restClientSettings() { @BeforeClass public static void checkClusterVersion() { - assumeTrue("no shutdown in versions before " + Version.V_7_15_0, getOldClusterVersion().onOrAfter(Version.V_7_15_0)); + @UpdateForV9 // always true + var originalClusterSupportsShutdown = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_15_0)) + .orElse(true); + assumeTrue("no shutdown in versions before 7.15", originalClusterSupportsShutdown); } @SuppressWarnings("unchecked") diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index ee75052ae1da7..2ad66f071d784 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -23,10 +23,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -289,7 +291,11 @@ public void testWatcherWithApiKey() throws 
Exception { } public void testServiceAccountApiKey() throws IOException { - assumeTrue("no service accounts in versions before " + Version.V_7_13_0, getOldClusterVersion().onOrAfter(Version.V_7_13_0)); + @UpdateForV9 + var originalClusterSupportsServiceAccounts = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_13_0)) + .orElse(true); + assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); + if (isRunningAgainstOldCluster()) { final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); @@ -352,7 +358,7 @@ public void testApiKeySuperuser() throws IOException { ) ) ); - if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SECURITY_ROLE_DESCRIPTORS_OPTIONAL)) { createApiKeyRequest.setJsonEntity(""" { "name": "super_legacy_key" @@ -385,13 +391,13 @@ public void testApiKeySuperuser() throws IOException { saveApiKeyRequest.setJsonEntity("{\"auth_header\":\"" + apiKeyAuthHeader + "\"}"); assertOK(client().performRequest(saveApiKeyRequest)); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_ENFORCED) == false) { final Request indexRequest = new Request("POST", ".security/_doc"); indexRequest.setJsonEntity(""" { "doc_type": "foo" }"""); - if (getOldClusterVersion().onOrAfter(Version.V_7_10_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) { indexRequest.setOptions(systemIndexWarningHandlerOptions(".security-7").addHeader("Authorization", apiKeyAuthHeader)); } else { indexRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", apiKeyAuthHeader)); @@ -447,7 +453,7 @@ public void testRollupAfterRestart() throws Exception { final Request 
createRollupJobRequest = new Request("PUT", "/_rollup/job/rollup-job-test"); String intervalType; - if (getOldClusterVersion().onOrAfter(Version.V_7_2_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM)) { intervalType = "fixed_interval"; } else { intervalType = "interval"; @@ -497,7 +503,11 @@ public void testRollupAfterRestart() throws Exception { } public void testTransformLegacyTemplateCleanup() throws Exception { - assumeTrue("Before 7.2 transforms didn't exist", getOldClusterVersion().onOrAfter(Version.V_7_2_0)); + @UpdateForV9 + var originalClusterSupportsTransform = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_2_0)) + .orElse(true); + assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform); + if (isRunningAgainstOldCluster()) { // create the source index @@ -520,7 +530,7 @@ public void testTransformLegacyTemplateCleanup() throws Exception { assertThat(createIndexResponse.get("acknowledged"), equalTo(Boolean.TRUE)); // create a transform - String endpoint = getOldClusterVersion().onOrAfter(Version.V_7_5_0) + String endpoint = clusterHasFeature(RestTestLegacyFeatures.TRANSFORM_NEW_API_ENDPOINT) ? 
"_transform/transform-full-cluster-restart-test" : "_data_frame/transforms/transform-full-cluster-restart-test"; final Request createTransformRequest = new Request("PUT", endpoint); @@ -584,7 +594,7 @@ public void testSlmPolicyAndStats() throws IOException { Collections.singletonMap("indices", Collections.singletonList("*")), null ); - if (isRunningAgainstOldCluster() && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) { + if (isRunningAgainstOldCluster() && has(ProductFeature.SLM)) { Request createRepoRequest = new Request("PUT", "_snapshot/test-repo"); String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}"; createRepoRequest.setJsonEntity(repoCreateJson); @@ -598,7 +608,7 @@ public void testSlmPolicyAndStats() throws IOException { client().performRequest(createSlmPolicyRequest); } - if (isRunningAgainstOldCluster() == false && getOldClusterVersion().onOrAfter(Version.V_7_4_0)) { + if (isRunningAgainstOldCluster() == false && has(ProductFeature.SLM)) { Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy"); Response response = client().performRequest(getSlmPolicyRequest); Map responseMap = entityAsMap(response); @@ -749,11 +759,7 @@ private void waitForHits(String indexName, int expectedHits) throws Exception { Map hits = (Map) response.get("hits"); logger.info("Hits are: {}", hits); Integer total; - if (getOldClusterVersion().onOrAfter(Version.V_7_0_0) || isRunningAgainstOldCluster() == false) { - total = (Integer) ((Map) hits.get("total")).get("value"); - } else { - total = (Integer) hits.get("total"); - } + total = (Integer) ((Map) hits.get("total")).get("value"); assertThat(total, greaterThanOrEqualTo(expectedHits)); } catch (IOException ioe) { if (ioe instanceof ResponseException) { @@ -929,7 +935,12 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS @SuppressWarnings("unchecked") public void testDataStreams() throws Exception { - assumeTrue("no 
data streams in versions before " + Version.V_7_9_0, getOldClusterVersion().onOrAfter(Version.V_7_9_0)); + + @UpdateForV9 + var originalClusterSupportsDataStreams = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_7_9_0)) + .orElse(true); + + assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams); if (isRunningAgainstOldCluster()) { createComposableTemplate(client(), "dst", "ds"); @@ -966,7 +977,12 @@ public void testDataStreams() throws Exception { assertEquals("ds", ds.get("name")); assertEquals(1, indices.size()); assertEquals( - DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, getOldClusterVersion()), + DataStreamTestHelper.getLegacyDefaultBackingIndexName( + "ds", + 1, + timestamp, + clusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_DATE_IN_INDEX_NAME) + ), indices.get(0).get("index_name") ); assertNumHits("ds", 1, 1); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index f67d1e4c37b28..b2594eaf02ea4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -92,7 +92,7 @@ protected Settings restClientSettings() { public void testDeploymentSurvivesRestart() throws Exception { @UpdateForV9 // upgrade will always be from v8, condition can be removed - var originalClusterAtLeastV8 = getOldClusterVersion().onOrAfter(Version.V_8_0_0); + var originalClusterAtLeastV8 = parseLegacyVersion(getOldClusterVersion()).map(v -> v.onOrAfter(Version.V_8_0_0)).orElse(true); // These tests assume the original cluster is v8 - testing for features on the 
_current_ cluster will break for NEW assumeTrue("NLP model deployments added in 8.0", originalClusterAtLeastV8); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index e4ab3957f2627..3674f811ebb0a 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -8,13 +8,13 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; @@ -53,7 +53,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } @@ -62,7 +62,7 @@ public void testMlConfigIndexMappingsAfterMigration() throws Exception { if (isRunningAgainstOldCluster()) { // trigger .ml-config index creation createAnomalyDetectorJob(OLD_CLUSTER_JOB_ID); - if (getOldClusterVersion().onOrAfter(Version.V_7_3_0)) { + if 
(clusterHasFeature(RestTestLegacyFeatures.ML_ANALYTICS_MAPPINGS)) { // .ml-config has mappings for analytics as the feature was introduced in 7.3.0 assertThat(getDataFrameAnalysisMappings().keySet(), hasItem("outlier_detection")); } else { diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java index aeb3dad547946..16345a19fc950 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -18,6 +17,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -68,7 +68,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } @@ -79,7 +79,7 @@ public void testMlIndicesBecomeHidden() throws Exception { createAnomalyDetectorJob(JOB_ID); openAnomalyDetectorJob(JOB_ID); - if (getOldClusterVersion().before(Version.V_7_7_0)) { + if 
(clusterHasFeature(RestTestLegacyFeatures.ML_INDICES_HIDDEN) == false) { Map indexSettingsMap = contentAsMap(getMlIndicesSettings()); Map aliasesMap = contentAsMap(getMlAliases()); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 2bbda9123ae34..0b15e98f201a0 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; @@ -19,6 +18,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -61,7 +61,7 @@ public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - getOldClusterVersion().onOrAfter(Version.V_7_8_0) + clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) ); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java index 
1f0e245691b57..fb7c22845b788 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/WatcherMappingUpdateIT.java @@ -11,12 +11,13 @@ import org.apache.http.util.EntityUtils; import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import java.nio.charset.StandardCharsets; @@ -64,18 +65,18 @@ public void testMappingsAreUpdated() throws Exception { """); client().performRequest(putWatchRequest); - if (getOldClusterVersion().onOrAfter(Version.V_7_13_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.WATCHES_VERSION_IN_META)) { assertMappingVersion(".watches", getOldClusterVersion()); } else { // watches indices from before 7.10 do not have mapping versions in _meta assertNoMappingVersion(".watches"); } } else { - assertMappingVersion(".watches", Version.CURRENT); + assertMappingVersion(".watches", Build.current().version()); } } - private void assertMappingVersion(String index, Version clusterVersion) throws Exception { + private void assertMappingVersion(String index, String clusterVersion) throws Exception { assertBusy(() -> { Request mappingRequest = new Request("GET", index + "/_mappings"); mappingRequest.setOptions(getWarningHandlerOptions(index)); @@ -88,7 +89,8 @@ private void assertMappingVersion(String index, Version clusterVersion) throws E private void assertNoMappingVersion(String index) throws Exception { assertBusy(() -> { Request mappingRequest = new Request("GET", 
index + "/_mappings"); - if (isRunningAgainstOldCluster() == false || getOldClusterVersion().onOrAfter(Version.V_7_10_0)) { + assert isRunningAgainstOldCluster(); + if (clusterHasFeature(RestTestLegacyFeatures.SYSTEM_INDICES_REST_ACCESS_DEPRECATED)) { mappingRequest.setOptions(getWarningHandlerOptions(index)); } Response response = client().performRequest(mappingRequest); From 8e332b65e9df183c5c0ee631528fd2f11523a398 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 5 Dec 2023 10:23:30 +0100 Subject: [PATCH 204/263] [Enterprise Search] Add update sync job error endpoint (#102913) Add update sync job error endpoint. --- .../api/connector_sync_job.error.json | 39 +++++ .../450_connector_sync_job_error.yml | 40 +++++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 2 +- .../syncjob/ConnectorSyncJobIndexService.java | 37 +++++ ...RestUpdateConnectorSyncJobErrorAction.java | 52 +++++++ ...portUpdateConnectorSyncJobErrorAction.java | 53 +++++++ .../UpdateConnectorSyncJobErrorAction.java | 145 ++++++++++++++++++ .../ConnectorSyncJobIndexServiceTests.java | 55 +++++++ .../syncjob/ConnectorSyncJobTestUtils.java | 5 + ...pdateConnectorSyncJobErrorActionTests.java | 74 +++++++++ ...rorActionRequestBWCSerializationTests.java | 53 +++++++ ...pdateConnectorSyncJobErrorActionTests.java | 47 ++++++ .../xpack/security/operator/Constants.java | 7 +- 14 files changed, 610 insertions(+), 4 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java create mode 100644 
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json new file mode 100644 index 0000000000000..5db061eda6e48 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json @@ -0,0 +1,39 @@ +{ + "connector_sync_job.error": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Sets an error for a connector sync job." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job/{connector_sync_job_id}/_error", + "methods": [ + "PUT" + ], + "parts": { + "connector_sync_job_id": { + "type": "string", + "description": "The unique identifier of the connector sync job to set an error for." 
+ } + } + } + ] + }, + "body": { + "description": "The error to set in the connector sync job.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml new file mode 100644 index 0000000000000..1ba3cf1c50b7c --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml @@ -0,0 +1,40 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + +--- +"Set an error for a connector sync job": + - do: + connector_sync_job.post: + body: + id: test-connector + job_type: full + trigger_method: on_demand + - set: { id: id } + - do: + connector_sync_job.error: + connector_sync_job_id: $id + body: + error: error + + - match: { acknowledged: true } + + +--- +"Set an error for a Connector Sync Job - Connector Sync Job does not exist": + - do: + connector_sync_job.error: + connector_sync_job_id: test-nonexistent-connector-sync-job-id + body: + error: error + catch: missing diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index c4dbee214f37a..ce9bbfa4d6a4b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,13 +85,16 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import 
org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; @@ -227,6 +230,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>(UpdateConnectorSyncJobErrorAction.INSTANCE, 
TransportUpdateConnectorSyncJobErrorAction.class), new ActionHandler<>( UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, TransportUpdateConnectorSyncJobIngestionStatsAction.class @@ -300,6 +304,7 @@ public List getRestHandlers( new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), new RestCheckInConnectorSyncJobAction(), + new RestUpdateConnectorSyncJobErrorAction(), new RestUpdateConnectorSyncJobIngestionStatsAction() ) ); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 0781bb515fe93..211a423dab99e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -77,7 +77,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { public static final ParseField DELETED_DOCUMENT_COUNT_FIELD = new ParseField("deleted_document_count"); - static final ParseField ERROR_FIELD = new ParseField("error"); + public static final ParseField ERROR_FIELD = new ParseField("error"); public static final ParseField ID_FIELD = new ParseField("id"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index f105e6ece72aa..9bcd03eb21ca9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -356,6 +356,43 @@ public void 
onFailure(Exception e) { } } + /** + * Sets the error for the {@link ConnectorSyncJob} in the underlying index. + * This also sets the {@link ConnectorSyncStatus} to 'ERROR'. + * + * @param connectorSyncJobId The id of the connector sync job object. + * @param error The error to set. + * @param listener The action listener to invoke on response/failure. + */ + public void updateConnectorSyncJobError(String connectorSyncJobId, String error, ActionListener listener) { + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_SYNC_JOB_INDEX_NAME, connectorSyncJobId).setRefreshPolicy( + WriteRequest.RefreshPolicy.IMMEDIATE + ) + .doc( + Map.of( + ConnectorSyncJob.ERROR_FIELD.getPreferredName(), + error, + ConnectorSyncJob.STATUS_FIELD.getPreferredName(), + ConnectorSyncStatus.ERROR + ) + ); + + try { + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundOrDocumentMissingActionListener<>(connectorSyncJobId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorSyncJobId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Listeners that checks failures for IndexNotFoundException and DocumentMissingException, * and transforms them in ResourceNotFoundException, invoking onFailure on the delegate listener. diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java new file mode 100644 index 0000000000000..e19a9675beebb --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.CONNECTOR_SYNC_JOB_ID_PARAM; + +public class RestUpdateConnectorSyncJobErrorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_sync_job_update_error_action"; + } + + @Override + public List routes() { + return List.of( + new Route( + RestRequest.Method.PUT, + "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT + "/{" + CONNECTOR_SYNC_JOB_ID_PARAM + "}/_error" + ) + ); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + UpdateConnectorSyncJobErrorAction.Request request = UpdateConnectorSyncJobErrorAction.Request.fromXContentBytes( + restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM), + restRequest.content(), + restRequest.getXContentType() + ); + + return restChannel -> client.execute( + UpdateConnectorSyncJobErrorAction.INSTANCE, + request, + new RestToXContentListener<>(restChannel) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java new file mode 100644 index 
0000000000000..c814092f2e7a2 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; + +public class TransportUpdateConnectorSyncJobErrorAction extends HandledTransportAction< + UpdateConnectorSyncJobErrorAction.Request, + AcknowledgedResponse> { + + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportUpdateConnectorSyncJobErrorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorSyncJobErrorAction.NAME, + transportService, + actionFilters, + UpdateConnectorSyncJobErrorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute(Task task, UpdateConnectorSyncJobErrorAction.Request request, 
ActionListener listener) { + connectorSyncJobIndexService.updateConnectorSyncJobError( + request.getConnectorSyncJobId(), + request.getError(), + listener.map(r -> AcknowledgedResponse.TRUE) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java new file mode 100644 index 0000000000000..820630bccee03 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import 
org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class UpdateConnectorSyncJobErrorAction extends ActionType { + + public static final UpdateConnectorSyncJobErrorAction INSTANCE = new UpdateConnectorSyncJobErrorAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_error"; + public static final String ERROR_EMPTY_MESSAGE = "[error] of the connector sync job cannot be null or empty"; + + private UpdateConnectorSyncJobErrorAction() { + super(NAME, AcknowledgedResponse::readFrom); + } + + public static class Request extends ActionRequest implements ToXContentObject { + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_sync_job_error_request", + false, + ((args, connectorSyncJobId) -> new Request(connectorSyncJobId, (String) args[0])) + ); + + static { + PARSER.declareString(constructorArg(), ConnectorSyncJob.ERROR_FIELD); + } + + private final String connectorSyncJobId; + + private final String error; + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorSyncJobId = in.readString(); + this.error = in.readString(); + } + + public Request(String connectorSyncJobId, String error) { + this.connectorSyncJobId = connectorSyncJobId; + this.error = error; + } + + public static Request fromXContentBytes(String connectorSyncJobId, BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorSyncJobErrorAction.Request.fromXContent(parser, connectorSyncJobId); + } catch (IOException e) { + throw 
new ElasticsearchParseException("Failed to parse: " + source.utf8ToString()); + } + } + + public static UpdateConnectorSyncJobErrorAction.Request fromXContent(XContentParser parser, String connectorSyncJobId) + throws IOException { + return PARSER.parse(parser, connectorSyncJobId); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorSyncJobId)) { + validationException = addValidationError( + ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE, + validationException + ); + } + + if (Strings.isNullOrEmpty(error)) { + validationException = addValidationError(ERROR_EMPTY_MESSAGE, validationException); + } + + return validationException; + } + + public String getConnectorSyncJobId() { + return connectorSyncJobId; + } + + public String getError() { + return error; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorSyncJobId); + out.writeString(error); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorSyncJobId, request.connectorSyncJobId) && Objects.equals(error, request.error); + } + + @Override + public int hashCode() { + return Objects.hash(connectorSyncJobId, error); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(ConnectorSyncJob.ERROR_FIELD.getPreferredName(), error); + } + builder.endObject(); + return builder; + } + + public static UpdateConnectorSyncJobErrorAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + } + +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index 2dcf43c6f3f22..ab16fb8a46eb0 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; @@ -253,6 +254,35 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testUpdateConnectorSyncJobError() throws Exception { + PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connector.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); + String syncJobId = response.getId(); + + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + String errorInRequest = request.getError(); + + UpdateResponse updateResponse = awaitUpdateConnectorSyncJob(syncJobId, errorInRequest); + Map connectorSyncJobSource = getConnectorSyncJobSourceById(syncJobId); 
+ String error = (String) connectorSyncJobSource.get(ConnectorSyncJob.ERROR_FIELD.getPreferredName()); + ConnectorSyncStatus syncStatus = ConnectorSyncStatus.fromString( + (String) connectorSyncJobSource.get(ConnectorSyncJob.STATUS_FIELD.getPreferredName()) + ); + + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + assertThat(error, equalTo(errorInRequest)); + assertThat(syncStatus, equalTo(ConnectorSyncStatus.ERROR)); + } + + public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException() { + expectThrows( + ResourceNotFoundException.class, + () -> awaitUpdateConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID, randomAlphaOfLengthBetween(5, 100)) + ); + } + public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( connector.getConnectorId() @@ -435,6 +465,31 @@ private static void assertFieldsDidNotUpdateExceptFieldList( } } + private UpdateResponse awaitUpdateConnectorSyncJob(String syncJobId, String error) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorSyncJobIndexService.updateConnectorSyncJobError(syncJobId, error, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse updateResponse) { + resp.set(updateResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update request", latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from update request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitCancelConnectorSyncJob(String syncJobId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference 
resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index 8170391094356..fb412db168605 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import java.time.Instant; @@ -104,6 +105,10 @@ public static CheckInConnectorSyncJobAction.Request getRandomCheckInConnectorSyn return new CheckInConnectorSyncJobAction.Request(randomAlphaOfLength(10)); } + public static UpdateConnectorSyncJobErrorAction.Request getRandomUpdateConnectorSyncJobErrorActionRequest() { + return new UpdateConnectorSyncJobErrorAction.Request(randomAlphaOfLength(10), randomAlphaOfLengthBetween(5, 100)); + } + public static UpdateConnectorSyncJobIngestionStatsAction.Request getRandomUpdateConnectorSyncJobIngestionStatsActionRequest() { Instant lowerBoundInstant = Instant.ofEpochSecond(0L); Instant upperBoundInstant = Instant.ofEpochSecond(3000000000L); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java new file mode 100644 index 0000000000000..fd974d5fd21f7 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorActionTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportUpdateConnectorSyncJobErrorActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportUpdateConnectorSyncJobErrorAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + 
Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportUpdateConnectorSyncJobErrorAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testUpdateConnectorSyncJobError_ExpectNoWarnings() throws InterruptedException { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(UpdateConnectorSyncJobErrorAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for update request", requestTimedOut); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java new file mode 100644 index 0000000000000..a6c52d8cbf62c --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorSyncJobErrorActionRequestBWCSerializationTests extends AbstractBWCSerializationTestCase< + UpdateConnectorSyncJobErrorAction.Request> { + + private String connectorSyncJobId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorSyncJobErrorAction.Request::new; + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request createTestInstance() { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + this.connectorSyncJobId = request.getConnectorSyncJobId(); + return request; + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request mutateInstance(UpdateConnectorSyncJobErrorAction.Request instance) + throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorSyncJobErrorAction.Request.fromXContent(parser, this.connectorSyncJobId); + } + + @Override + protected UpdateConnectorSyncJobErrorAction.Request mutateInstanceForVersion( + UpdateConnectorSyncJobErrorAction.Request instance, + TransportVersion version + ) { + return new UpdateConnectorSyncJobErrorAction.Request(instance.getConnectorSyncJobId(), instance.getError()); + } +} diff --git 
a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java new file mode 100644 index 0000000000000..0899aa3b599df --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorActionTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class UpdateConnectorSyncJobErrorActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSyncJobIdAndErrorArePresent_ExpectNoValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = ConnectorSyncJobTestUtils.getRandomUpdateConnectorSyncJobErrorActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSyncJobIdIsEmpty_ExceptionValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = new UpdateConnectorSyncJobErrorAction.Request( + "", + randomAlphaOfLengthBetween(10, 100) + ); + ActionRequestValidationException 
exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE)); + } + + public void testValidate_WhenErrorIsEmpty_ExceptionValidationError() { + UpdateConnectorSyncJobErrorAction.Request request = new UpdateConnectorSyncJobErrorAction.Request(randomAlphaOfLength(10), ""); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString(UpdateConnectorSyncJobErrorAction.ERROR_EMPTY_MESSAGE)); + } + +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 11e293d8675f7..5c4fd44d77c9b 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -134,11 +134,12 @@ public class Constants { "cluster:admin/xpack/connector/update_last_sync_stats", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", - "cluster:admin/xpack/connector/sync_job/post", - "cluster:admin/xpack/connector/sync_job/delete", + "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/connector/sync_job/check_in", + "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/get", - "cluster:admin/xpack/connector/sync_job/cancel", + "cluster:admin/xpack/connector/sync_job/post", + "cluster:admin/xpack/connector/sync_job/update_error", "cluster:admin/xpack/connector/sync_job/update_stats", "cluster:admin/xpack/deprecation/info", 
"cluster:admin/xpack/deprecation/nodes/info", From 2690fe3cf35e4432f33a0d8c6f0c9d8e89417dc3 Mon Sep 17 00:00:00 2001 From: Andrei Dan Date: Tue, 5 Dec 2023 11:27:39 +0200 Subject: [PATCH 205/263] Report DSL error entry information to the health node (#102635) Add support to the DataStreamLifecycleService to report information regarding the top 500 (configurable) error entries, sorted by retry count, to the health node. After every DSL loop (every 5 minutes by default) the error entries information is reported to the health node. We omit the error message in the entries as that can be long and we will not display it in the health API as we will recommend checking the explain API to proceed further towards green health. --- .../DataStreamLifecycleServiceIT.java | 83 ++++++- .../src/main/java/module-info.java | 2 + .../datastreams/DataStreamFeatures.java | 28 +++ .../datastreams/DataStreamsPlugin.java | 16 +- .../DataStreamLifecycleErrorStore.java | 30 +++ .../lifecycle/DataStreamLifecycleService.java | 25 +- ...ataStreamLifecycleHealthInfoPublisher.java | 116 ++++++++++ ...lasticsearch.features.FeatureSpecification | 9 + .../DataStreamLifecycleErrorStoreTests.java | 31 +++ .../DataStreamLifecycleServiceTests.java | 27 ++- ...plainDataStreamLifecycleResponseTests.java | 6 +- ...reamLifecycleHealthInfoPublisherTests.java | 214 ++++++++++++++++++ ...sAvailabilityHealthIndicatorServiceIT.java | 6 +- .../org/elasticsearch/TransportVersions.java | 2 + .../datastreams/lifecycle/ErrorEntry.java | 11 +- .../node/DataStreamLifecycleHealthInfo.java | 34 +++ .../health/node/DslErrorInfo.java | 34 +++ .../elasticsearch/health/node/HealthInfo.java | 22 +- .../health/node/HealthInfoCache.java | 19 +- .../node/UpdateHealthInfoCacheAction.java | 64 +++++- .../health/HealthServiceTests.java | 3 +- .../node/DiskHealthIndicatorServiceTests.java | 4 +- .../node/FetchHealthInfoCacheActionTests.java | 15 +- .../health/node/HealthInfoCacheTests.java | 24 +- 
.../health/node/HealthInfoTests.java | 14 +- 25 files changed, 795 insertions(+), 44 deletions(-) create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java create mode 100644 modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification create mode 100644 modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 7ac86c8aee614..d3eaee36f67f7 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -46,6 +46,9 @@ import org.elasticsearch.datastreams.DataStreamsPlugin; import org.elasticsearch.datastreams.lifecycle.action.ExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.PutDataStreamLifecycleAction; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.index.Index; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -63,6 +66,7 @@ import 
java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; @@ -77,7 +81,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -95,6 +101,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); settings.put(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL, "1s"); settings.put(DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING.getKey(), "min_docs=1,max_docs=1"); + // we'll test DSL errors reach the health node, so we're lowering the threshold over which we report errors + settings.put(DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING.getKey(), "3"); return settings.build(); } @@ -394,8 +402,8 @@ public void testErrorRecordingOnRollover() throws Exception { indexDocs(dataStreamName, 1); + String writeIndexName = getBackingIndices(dataStreamName).get(1); assertBusy(() -> { - String writeIndexName = getBackingIndices(dataStreamName).get(1); ErrorEntry writeIndexRolloverError = null; Iterable lifecycleServices = internalCluster().getInstances(DataStreamLifecycleService.class); @@ -408,6 +416,35 @@ public void testErrorRecordingOnRollover() throws Exception { assertThat(writeIndexRolloverError, is(notNullValue())); assertThat(writeIndexRolloverError.error(), containsString("maximum normal shards open")); + + 
ExplainDataStreamLifecycleAction.Request explainRequest = new ExplainDataStreamLifecycleAction.Request( + new String[] { dataStreamName } + ); + ExplainDataStreamLifecycleAction.Response response = client().execute(ExplainDataStreamLifecycleAction.INSTANCE, explainRequest) + .actionGet(); + boolean found = false; + for (ExplainIndexDataStreamLifecycle index : response.getIndices()) { + if (index.getError() != null && index.getError().retryCount() > 3) { + found = true; + break; + } + } + assertTrue(found); + }, 30, TimeUnit.SECONDS); + + // DSL should signal to the health node that there's an error in the store that's been retried at least 3 times + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(not(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS))); + assertThat(dslHealthInfoOnHealthNode.dslErrorsInfo().size(), is(1)); + DslErrorInfo errorInfo = dslHealthInfoOnHealthNode.dslErrorsInfo().get(0); + + assertThat(errorInfo.indexName(), is(writeIndexName)); + assertThat(errorInfo.retryCount(), greaterThanOrEqualTo(3)); }); // let's reset the cluster max shards per node limit to allow rollover to proceed and check the error store is empty @@ -429,6 +466,16 @@ public void testErrorRecordingOnRollover() throws Exception { assertThat(lifecycleService.getErrorStore().getError(previousWriteInddex), nullValue()); } }); + + // the error has been fixed so the health information shouldn't be reported anymore + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = 
healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)); + }); } public void testErrorRecordingOnRetention() throws Exception { @@ -470,7 +517,9 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(writeIndex, backingIndexEqualTo(dataStreamName, 2)); }); - String firstGenerationIndex = getBackingIndices(dataStreamName).get(0); + List dsBackingIndices = getBackingIndices(dataStreamName); + String firstGenerationIndex = dsBackingIndices.get(0); + String secondGenerationIndex = dsBackingIndices.get(1); // mark the first generation index as read-only so deletion fails when we enable the retention configuration updateIndexSettings(Settings.builder().put(READ_ONLY.settingName(), true), firstGenerationIndex); @@ -493,7 +542,7 @@ public void testErrorRecordingOnRetention() throws Exception { for (DataStreamLifecycleService lifecycleService : lifecycleServices) { recordedRetentionExecutionError = lifecycleService.getErrorStore().getError(firstGenerationIndex); - if (recordedRetentionExecutionError != null) { + if (recordedRetentionExecutionError != null && recordedRetentionExecutionError.retryCount() > 3) { break; } } @@ -502,6 +551,24 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(recordedRetentionExecutionError.error(), containsString("blocked by: [FORBIDDEN/5/index read-only (api)")); }); + // DSL should signal to the health node that there's an error in the store that's been retried at least 3 times + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(not(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS))); + // perhaps surprisingly rollover and 
delete are error-ing due to the read_only block on the first generation + // index which prevents metadata updates so rolling over the data stream is also blocked (note that both indices error at + // the same time so they'll have an equal retry count - the order becomes of the results, usually ordered by retry count, + // becomes non deterministic, hence the dynamic matching of index name) + assertThat(dslHealthInfoOnHealthNode.dslErrorsInfo().size(), is(2)); + DslErrorInfo errorInfo = dslHealthInfoOnHealthNode.dslErrorsInfo().get(0); + assertThat(errorInfo.retryCount(), greaterThanOrEqualTo(3)); + assertThat(List.of(firstGenerationIndex, secondGenerationIndex).contains(errorInfo.indexName()), is(true)); + }); + // let's mark the index as writeable and make sure it's deleted and the error store is empty updateIndexSettings(Settings.builder().put(READ_ONLY.settingName(), false), firstGenerationIndex); @@ -521,6 +588,16 @@ public void testErrorRecordingOnRetention() throws Exception { assertThat(lifecycleService.getErrorStore().getError(firstGenerationIndex), nullValue()); } }); + + // health info for DSL should be EMPTY as everything's healthy + assertBusy(() -> { + FetchHealthInfoCacheAction.Response healthNodeResponse = client().execute( + FetchHealthInfoCacheAction.INSTANCE, + new FetchHealthInfoCacheAction.Request() + ).get(); + DataStreamLifecycleHealthInfo dslHealthInfoOnHealthNode = healthNodeResponse.getHealthInfo().dslHealthInfo(); + assertThat(dslHealthInfoOnHealthNode, is(DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)); + }); } finally { // when the test executes successfully this will not be needed however, otherwise we need to make sure the index is // "delete-able" for test cleanup diff --git a/modules/data-streams/src/main/java/module-info.java b/modules/data-streams/src/main/java/module-info.java index 385c550d770e0..f21edca356ad9 100644 --- a/modules/data-streams/src/main/java/module-info.java +++ b/modules/data-streams/src/main/java/module-info.java 
@@ -16,4 +16,6 @@ exports org.elasticsearch.datastreams.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle; + + provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.datastreams.DataStreamFeatures; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java new file mode 100644 index 0000000000000..53b3ca3353bab --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +/** + * Provides the features for data streams that this version of the code supports + */ +public class DataStreamFeatures implements FeatureSpecification { + + @Override + public Set getFeatures() { + return Set.of( + DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE // Added in 8.12 + ); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index dd8e13cf18408..9ac3a1afed5a5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -47,6 +47,7 @@ import org.elasticsearch.datastreams.lifecycle.action.TransportGetDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.action.TransportGetDataStreamLifecycleStatsAction; import org.elasticsearch.datastreams.lifecycle.action.TransportPutDataStreamLifecycleAction; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.datastreams.lifecycle.rest.RestDataStreamLifecycleStatsAction; import org.elasticsearch.datastreams.lifecycle.rest.RestDeleteDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestExplainDataStreamLifecycleAction; @@ -110,7 +111,7 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin { private final SetOnce errorStoreInitialisationService = new SetOnce<>(); private final SetOnce dataLifecycleInitialisationService = new SetOnce<>(); - + private final SetOnce dataStreamLifecycleErrorsPublisher = new SetOnce<>(); 
private final Settings settings; public DataStreamsPlugin(Settings settings) { @@ -160,6 +161,15 @@ public Collection createComponents(PluginServices services) { this.updateTimeSeriesRangeService.set(updateTimeSeriesRangeService); components.add(this.updateTimeSeriesRangeService.get()); errorStoreInitialisationService.set(new DataStreamLifecycleErrorStore(services.threadPool()::absoluteTimeInMillis)); + dataStreamLifecycleErrorsPublisher.set( + new DataStreamLifecycleHealthInfoPublisher( + settings, + services.client(), + services.clusterService(), + errorStoreInitialisationService.get(), + services.featureService() + ) + ); dataLifecycleInitialisationService.set( new DataStreamLifecycleService( settings, @@ -169,12 +179,14 @@ public Collection createComponents(PluginServices services) { services.threadPool(), services.threadPool()::absoluteTimeInMillis, errorStoreInitialisationService.get(), - services.allocationService() + services.allocationService(), + dataStreamLifecycleErrorsPublisher.get() ) ); dataLifecycleInitialisationService.get().init(); components.add(errorStoreInitialisationService.get()); components.add(dataLifecycleInitialisationService.get()); + components.add(dataStreamLifecycleErrorsPublisher.get()); return components; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java index 01ccbdbe3ffec..a1f0c639f51f1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStore.java @@ -12,11 +12,16 @@ import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.health.node.DslErrorInfo; +import 
java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.LongSupplier; +import java.util.function.Predicate; +import java.util.stream.Collectors; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; @@ -90,4 +95,29 @@ public ErrorEntry getError(String indexName) { public Set getAllIndices() { return Set.copyOf(indexNameToError.keySet()); } + + /** + * Retrieve the error entries in the error store that satisfy the provided predicate. + * This will return the error entries information (a subset of all the fields an {@link ErrorEntry} holds) sorted by the number of + * retries DSL attempted (descending order) and the number of entries will be limited according to the provided limit parameter. + * Returns empty list if no entries are present in the error store or none satisfy the predicate. + */ + public List getErrorsInfo(Predicate errorEntryPredicate, int limit) { + if (indexNameToError.isEmpty()) { + return List.of(); + } + return indexNameToError.entrySet() + .stream() + .filter(keyValue -> errorEntryPredicate.test(keyValue.getValue())) + .sorted(Map.Entry.comparingByValue()) + .limit(limit) + .map( + keyValue -> new DslErrorInfo( + keyValue.getKey(), + keyValue.getValue().firstOccurrenceTimestamp(), + keyValue.getValue().retryCount() + ) + ) + .collect(Collectors.toList()); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 9f9a90704167d..4d2c2af2266b1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -62,6 +62,7 @@ import org.elasticsearch.core.Tuple; import 
org.elasticsearch.datastreams.lifecycle.downsampling.DeleteSourceAndAddDownsampleIndexExecutor; import org.elasticsearch.datastreams.lifecycle.downsampling.DeleteSourceAndAddDownsampleToDS; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; @@ -158,6 +159,7 @@ public class DataStreamLifecycleService implements ClusterStateListener, Closeab private final ThreadPool threadPool; final ResultDeduplicator transportActionsDeduplicator; final ResultDeduplicator clusterStateChangesDeduplicator; + private final DataStreamLifecycleHealthInfoPublisher dslHealthInfoPublisher; private LongSupplier nowSupplier; private final Clock clock; private final DataStreamLifecycleErrorStore errorStore; @@ -204,7 +206,8 @@ public DataStreamLifecycleService( ThreadPool threadPool, LongSupplier nowSupplier, DataStreamLifecycleErrorStore errorStore, - AllocationService allocationService + AllocationService allocationService, + DataStreamLifecycleHealthInfoPublisher dataStreamLifecycleHealthInfoPublisher ) { this.settings = settings; this.client = client; @@ -232,6 +235,7 @@ public DataStreamLifecycleService( Priority.URGENT, // urgent priority as this deletes indices new DeleteSourceAndAddDownsampleIndexExecutor(allocationService) ); + this.dslHealthInfoPublisher = dataStreamLifecycleHealthInfoPublisher; } /** @@ -296,6 +300,25 @@ public void triggered(SchedulerEngine.Event event) { event.getTriggeredTime() ); run(clusterService.state()); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + assert acknowledgedResponse.isAcknowledged() : "updating the health info is always acknowledged"; + } + + @Override + public void onFailure(Exception e) { + logger.debug( + String.format( + Locale.ROOT, + "unable to update the health 
cache with DSL errors related information " + + "due to [%s]. Will retry on the next DSL run", + e.getMessage() + ), + e + ); + } + }); } } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java new file mode 100644 index 0000000000000..12abbe125cffb --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisher.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams.lifecycle.health; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.health.node.selection.HealthNode; + +import java.util.List; + +import static 
org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING; + +/** + * Provides the infrastructure to send errors encountered by indices managed by data stream lifecycle service to the health node. + */ +public class DataStreamLifecycleHealthInfoPublisher { + private static final Logger logger = LogManager.getLogger(DataStreamLifecycleHealthInfoPublisher.class); + /** + * Controls the number of DSL error entries we publish to the health node. + */ + public static final Setting DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING = Setting.intSetting( + "data_streams.lifecycle.max_errors_to_publish", + 500, + 0, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final NodeFeature DSL_HEALTH_INFO_FEATURE = new NodeFeature("health.dsl.info"); + + private final Client client; + private final ClusterService clusterService; + private final DataStreamLifecycleErrorStore errorStore; + private final FeatureService featureService; + private volatile int signallingErrorRetryInterval; + private volatile int maxNumberOfErrorsToPublish; + + public DataStreamLifecycleHealthInfoPublisher( + Settings settings, + Client client, + ClusterService clusterService, + DataStreamLifecycleErrorStore errorStore, + FeatureService featureService + ) { + this.client = client; + this.clusterService = clusterService; + this.errorStore = errorStore; + this.featureService = featureService; + this.signallingErrorRetryInterval = DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING.get(settings); + this.maxNumberOfErrorsToPublish = DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING.get(settings); + } + + public void init() { + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING, this::updateSignallingRetryThreshold); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(DATA_STREAM_LIFECYCLE_MAX_ERRORS_TO_PUBLISH_SETTING, 
this::updateNumberOfErrorsToPublish); + } + + private void updateSignallingRetryThreshold(int newValue) { + this.signallingErrorRetryInterval = newValue; + } + + private void updateNumberOfErrorsToPublish(int newValue) { + this.maxNumberOfErrorsToPublish = newValue; + } + + /** + * Publishes the DSL errors that have passed the signaling threshold (as defined by + * {@link org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService#DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING} + */ + public void publishDslErrorEntries(ActionListener actionListener) { + if (featureService.clusterHasFeature(clusterService.state(), DSL_HEALTH_INFO_FEATURE) == false) { + return; + } + // fetching the entries that persist in the error store for more than the signalling retry interval + // note that we're reporting this view into the error store on every publishing iteration + List errorEntriesToSignal = errorStore.getErrorsInfo( + entry -> entry.retryCount() >= signallingErrorRetryInterval, + maxNumberOfErrorsToPublish + ); + DiscoveryNode currentHealthNode = HealthNode.findHealthNode(clusterService.state()); + if (currentHealthNode != null) { + String healthNodeId = currentHealthNode.getId(); + logger.trace("reporting [{}] DSL error entries to to health node [{}]", errorEntriesToSignal.size(), healthNodeId); + client.execute( + UpdateHealthInfoCacheAction.INSTANCE, + new UpdateHealthInfoCacheAction.Request( + healthNodeId, + new DataStreamLifecycleHealthInfo(errorEntriesToSignal, errorStore.getAllIndices().size()) + ), + actionListener + ); + } else { + logger.trace("unable to report DSL health because there is no health node in the cluster. 
will retry on the next DSL run"); + } + } +} diff --git a/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification new file mode 100644 index 0000000000000..3f1579eac4f85 --- /dev/null +++ b/modules/data-streams/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -0,0 +1,9 @@ +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the Server Side Public License, v 1; you may not use this file except +# in compliance with, at your election, the Elastic License 2.0 or the Server +# Side Public License, v 1. +# + +org.elasticsearch.datastreams.DataStreamFeatures diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java index 9f1928374eb5f..41e0f6578cb7d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleErrorStoreTests.java @@ -9,10 +9,13 @@ package org.elasticsearch.datastreams.lifecycle; import org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; +import org.elasticsearch.health.node.DslErrorInfo; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.List; import java.util.Set; +import java.util.stream.IntStream; import java.util.stream.Stream; import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore.MAX_ERROR_MESSAGE_LENGTH; @@ -84,4 +87,32 @@ public void testRecordedErrorIsMaxOneThousandChars() { assertThat(errorStore.getError("test"), 
is(notNullValue())); assertThat(errorStore.getError("test").error().length(), is(MAX_ERROR_MESSAGE_LENGTH)); } + + public void testGetFilteredEntries() { + IntStream.range(0, 20).forEach(i -> errorStore.recordError("test20", new NullPointerException("testing"))); + IntStream.range(0, 5).forEach(i -> errorStore.recordError("test5", new NullPointerException("testing"))); + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 7, 100); + assertThat(entries.size(), is(1)); + assertThat(entries.get(0).indexName(), is("test20")); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 7, 0); + assertThat(entries.size(), is(0)); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 50, 100); + assertThat(entries.size(), is(0)); + } + + { + List entries = errorStore.getErrorsInfo(entry -> entry.retryCount() > 2, 100); + assertThat(entries.size(), is(2)); + assertThat(entries.get(0).indexName(), is("test20")); + assertThat(entries.get(1).indexName(), is("test5")); + } + } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 2445e6b0d72df..befa16573de23 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -62,6 +62,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.datastreams.DataStreamFeatures; +import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import 
org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; @@ -159,6 +162,7 @@ public void setupServices() { EmptySnapshotsInfoService.INSTANCE, TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY ); + DataStreamLifecycleErrorStore errorStore = new DataStreamLifecycleErrorStore(() -> now); dataStreamLifecycleService = new DataStreamLifecycleService( Settings.EMPTY, client, @@ -166,8 +170,15 @@ public void setupServices() { clock, threadPool, () -> now, - new DataStreamLifecycleErrorStore(() -> now), - allocationService + errorStore, + allocationService, + new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + client, + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ) ); clientDelegate = null; dataStreamLifecycleService.init(); @@ -1383,6 +1394,7 @@ public void testTimeSeriesIndicesStillWithinTimeBounds() { public void testTrackingTimeStats() { AtomicLong now = new AtomicLong(0); long delta = randomLongBetween(10, 10000); + DataStreamLifecycleErrorStore errorStore = new DataStreamLifecycleErrorStore(() -> Clock.systemUTC().millis()); DataStreamLifecycleService service = new DataStreamLifecycleService( Settings.EMPTY, getTransportRequestsRecordingClient(), @@ -1390,8 +1402,15 @@ public void testTrackingTimeStats() { Clock.systemUTC(), threadPool, () -> now.getAndAdd(delta), - new DataStreamLifecycleErrorStore(() -> Clock.systemUTC().millis()), - mock(AllocationService.class) + errorStore, + mock(AllocationService.class), + new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + getTransportRequestsRecordingClient(), + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ) ); assertThat(service.getLastRunDuration(), is(nullValue())); assertThat(service.getTimeBetweenStarts(), is(nullValue())); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java index 0c10e3964e168..829fe454f7463 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/action/ExplainDataStreamLifecycleResponseTests.java @@ -195,8 +195,9 @@ public void testToXContent() throws IOException { } { // Make sure generation_date is not present if it is null (which it is for a write index): + String index = randomAlphaOfLengthBetween(10, 30); ExplainIndexDataStreamLifecycle explainIndexWithNullGenerationDate = new ExplainIndexDataStreamLifecycle( - randomAlphaOfLengthBetween(10, 30), + index, true, now, randomBoolean() ? now + TimeValue.timeValueDays(1).getMillis() : null, @@ -252,8 +253,9 @@ private static ExplainIndexDataStreamLifecycle createRandomIndexDataStreamLifecy long now, @Nullable DataStreamLifecycle lifecycle ) { + String index = randomAlphaOfLengthBetween(10, 30); return new ExplainIndexDataStreamLifecycle( - randomAlphaOfLengthBetween(10, 30), + index, true, now, randomBoolean() ? now + TimeValue.timeValueDays(1).getMillis() : null, diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java new file mode 100644 index 0000000000000..31393a3fc18ed --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthInfoPublisherTests.java @@ -0,0 +1,214 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.datastreams.lifecycle.health; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.datastreams.DataStreamFeatures; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; +import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; +import org.elasticsearch.features.FeatureService; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; +import org.elasticsearch.health.node.DslErrorInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; + +import static 
org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING; +import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_MERGE_POLICY_TARGET_FLOOR_SEGMENT_SETTING; +import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING; +import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class DataStreamLifecycleHealthInfoPublisherTests extends ESTestCase { + + private long now; + private ClusterService clusterService; + private TestThreadPool threadPool; + private CopyOnWriteArrayList clientSeenRequests; + private DataStreamLifecycleHealthInfoPublisher dslHealthInfoPublisher; + private final DiscoveryNode node1 = DiscoveryNodeUtils.builder("node_1") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE)) + .build(); + private final DiscoveryNode node2 = DiscoveryNodeUtils.builder("node_2") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE)) + .build(); + private final DiscoveryNode[] allNodes = new DiscoveryNode[] { node1, node2 }; + private DataStreamLifecycleErrorStore errorStore; + + @Before + public void setupServices() { + threadPool = new TestThreadPool(getTestName()); + Set> builtInClusterSettings = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + builtInClusterSettings.add(DataStreamLifecycleService.DATA_STREAM_LIFECYCLE_POLL_INTERVAL_SETTING); + builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FLOOR_SEGMENT_SETTING); + builtInClusterSettings.add(DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING); + builtInClusterSettings.add(DATA_STREAM_SIGNALLING_ERROR_RETRY_INTERVAL_SETTING); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, builtInClusterSettings); + clusterService = 
createClusterService(threadPool, clusterSettings); + + now = System.currentTimeMillis(); + clientSeenRequests = new CopyOnWriteArrayList<>(); + + final Client client = getTransportRequestsRecordingClient(); + errorStore = new DataStreamLifecycleErrorStore(() -> now); + dslHealthInfoPublisher = new DataStreamLifecycleHealthInfoPublisher( + Settings.EMPTY, + client, + clusterService, + errorStore, + new FeatureService(List.of(new DataStreamFeatures())) + ); + } + + @After + public void cleanup() { + clientSeenRequests.clear(); + clusterService.close(); + threadPool.shutdownNow(); + } + + public void testPublishDslErrorEntries() { + for (int i = 0; i < 11; i++) { + errorStore.recordError("testIndexOverSignalThreshold", new NullPointerException("ouch")); + } + errorStore.recordError("testIndex", new IllegalStateException("bad state")); + ClusterState stateWithHealthNode = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); + stateWithHealthNode = ClusterState.builder(stateWithHealthNode) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, stateWithHealthNode); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(1)); + DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo(); + assertThat(dslHealthInfo, is(notNullValue())); + List dslErrorsInfo = dslHealthInfo.dslErrorsInfo(); + assertThat(dslErrorsInfo.size(), is(1)); + assertThat(dslErrorsInfo.get(0).indexName(), is("testIndexOverSignalThreshold")); + assertThat(dslHealthInfo.totalErrorEntriesCount(), is(2)); + } + + public void 
testPublishDslErrorEntriesNoHealthNode() { + // no requests are being executed + for (int i = 0; i < 11; i++) { + errorStore.recordError("testIndexOverSignalThreshold", new NullPointerException("ouch")); + } + errorStore.recordError("testIndex", new IllegalStateException("bad state")); + + ClusterState stateNoHealthNode = ClusterStateCreationUtils.state(node1, node1, null, allNodes); + stateNoHealthNode = ClusterState.builder(stateNoHealthNode) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, stateNoHealthNode); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(0)); + } + + public void testPublishDslErrorEntriesEmptyErrorStore() { + // publishes the empty error store (this is the "back to healthy" state where all errors have been fixed) + ClusterState state = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); + state = ClusterState.builder(state) + .nodeFeatures( + Map.of( + node1.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()), + node2.getId(), + Set.of(DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE.id()) + ) + ) + .build(); + ClusterServiceUtils.setState(clusterService, state); + dslHealthInfoPublisher.publishDslErrorEntries(new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) {} + + @Override + public void onFailure(Exception e) { + + } + }); + + assertThat(clientSeenRequests.size(), is(1)); + DataStreamLifecycleHealthInfo dslHealthInfo = clientSeenRequests.get(0).getDslHealthInfo(); + 
assertThat(dslHealthInfo, is(notNullValue())); + List dslErrorsInfo = dslHealthInfo.dslErrorsInfo(); + assertThat(dslErrorsInfo.size(), is(0)); + assertThat(dslHealthInfo.totalErrorEntriesCount(), is(0)); + } + + private Client getTransportRequestsRecordingClient() { + return new NoOpClient(threadPool) { + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + clientSeenRequests.add((UpdateHealthInfoCacheAction.Request) request); + } + }; + } + +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java index b862d0b2f20b6..92194a94ab44c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.test.ESIntegTestCase; @@ -132,7 +133,10 @@ private void assertHealthDuring(Matcher statusMatcher, Runnable ac @Override public void clusterChanged(ClusterChangedEvent event) { states.add( - new RoutingNodesAndHealth(event.state().getRoutingNodes(), service.calculate(false, 1, new HealthInfo(Map.of()))) + new RoutingNodesAndHealth( + event.state().getRoutingNodes(), + service.calculate(false, 1, new HealthInfo(Map.of(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)) + ) ); } }; diff --git 
a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ca79be9453cfe..7c3568986dccd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -186,6 +186,8 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_HF_SERVICE_ADDED = def(8_553_00_0); public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); + public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java index 79c59314d7425..62406ccd4f853 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/ErrorEntry.java @@ -25,7 +25,8 @@ public record ErrorEntry(long firstOccurrenceTimestamp, String error, long recordedTimestamp, int retryCount) implements Writeable, - ToXContentObject { + ToXContentObject, + Comparable { public ErrorEntry(StreamInput in) throws IOException { this(in.readLong(), in.readString(), in.readLong(), in.readInt()); @@ -77,4 +78,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(recordedTimestamp); out.writeInt(retryCount); } + + /** + * Compares two error entries by the number of retries, in reversed order by default. 
+ */ + @Override + public int compareTo(ErrorEntry o) { + return Integer.compare(o.retryCount, retryCount); + } } diff --git a/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java new file mode 100644 index 0000000000000..c94ca87104718 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/DataStreamLifecycleHealthInfo.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.List; + +/** + * Represents the data stream lifecycle information that would help shape the functionality's health. 
+ */ +public record DataStreamLifecycleHealthInfo(List dslErrorsInfo, int totalErrorEntriesCount) implements Writeable { + + public static final DataStreamLifecycleHealthInfo NO_DSL_ERRORS = new DataStreamLifecycleHealthInfo(List.of(), 0); + + public DataStreamLifecycleHealthInfo(StreamInput in) throws IOException { + this(in.readCollectionAsList(DslErrorInfo::new), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(dslErrorsInfo); + out.writeVInt(totalErrorEntriesCount); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java b/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java new file mode 100644 index 0000000000000..7c037ffe085d8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/DslErrorInfo.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +/** + * Represents a reduced view into an {@link org.elasticsearch.action.datastreams.lifecycle.ErrorEntry}, removing the + * exception message and last occurrence timestamp as we could potentially send thousands of entries over the wire + * and the omitted fields would not be used. 
+ */ +public record DslErrorInfo(String indexName, long firstOccurrence, int retryCount) implements Writeable { + + public DslErrorInfo(StreamInput in) throws IOException { + this(in.readString(), in.readLong(), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(indexName); + out.writeLong(firstOccurrence); + out.writeVInt(retryCount); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index e8e9dd9747a9f..0bb8027f8299d 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -8,26 +8,42 @@ package org.elasticsearch.health.node; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import java.io.IOException; import java.util.Map; +import static org.elasticsearch.health.node.DataStreamLifecycleHealthInfo.NO_DSL_ERRORS; + /** * This class wraps all the data returned by the health node. 
* @param diskInfoByNode A Map of node id to DiskHealthInfo for that node + * @param dslHealthInfo The data stream lifecycle health information */ -public record HealthInfo(Map diskInfoByNode) implements Writeable { - public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of()); +public record HealthInfo(Map diskInfoByNode, @Nullable DataStreamLifecycleHealthInfo dslHealthInfo) + implements + Writeable { + + public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of(), NO_DSL_ERRORS); public HealthInfo(StreamInput input) throws IOException { - this(input.readMap(DiskHealthInfo::new)); + this( + input.readMap(DiskHealthInfo::new), + input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS) + ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) + : null + ); } @Override public void writeTo(StreamOutput output) throws IOException { output.writeMap(diskInfoByNode, StreamOutput::writeWriteable); + if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS)) { + output.writeOptionalWriteable(dslHealthInfo); + } } } diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java index bb295f6401941..986b5e13dce6e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.Nullable; import org.elasticsearch.health.node.selection.HealthNode; import java.util.Map; @@ -26,6 +27,8 @@ public class HealthInfoCache implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(HealthInfoCache.class); private volatile 
ConcurrentHashMap diskInfoByNode = new ConcurrentHashMap<>(); + @Nullable + private volatile DataStreamLifecycleHealthInfo dslHealthInfo = null; private HealthInfoCache() {} @@ -35,8 +38,17 @@ public static HealthInfoCache create(ClusterService clusterService) { return healthInfoCache; } - public void updateNodeHealth(String nodeId, DiskHealthInfo diskHealthInfo) { - diskInfoByNode.put(nodeId, diskHealthInfo); + public void updateNodeHealth( + String nodeId, + @Nullable DiskHealthInfo diskHealthInfo, + @Nullable DataStreamLifecycleHealthInfo latestDslHealthInfo + ) { + if (diskHealthInfo != null) { + diskInfoByNode.put(nodeId, diskHealthInfo); + } + if (latestDslHealthInfo != null) { + dslHealthInfo = latestDslHealthInfo; + } } @Override @@ -56,6 +68,7 @@ public void clusterChanged(ClusterChangedEvent event) { } else if (diskInfoByNode.isEmpty() == false) { logger.debug("Node [{}][{}] is no longer the health node, emptying the cache.", localNode.getName(), localNode.getId()); diskInfoByNode = new ConcurrentHashMap<>(); + dslHealthInfo = null; } } @@ -65,6 +78,6 @@ public void clusterChanged(ClusterChangedEvent event) { */ public HealthInfo getHealthInfo() { // A shallow copy is enough because the inner data is immutable. 
- return new HealthInfo(Map.copyOf(diskInfoByNode)); + return new HealthInfo(Map.copyOf(diskInfoByNode), dslHealthInfo); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index 1499c278a4209..d1961c597bc1e 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.health.node; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; @@ -18,6 +19,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.health.node.action.HealthNodeRequest; import org.elasticsearch.health.node.action.TransportHealthNodeAction; import org.elasticsearch.tasks.Task; @@ -36,17 +38,37 @@ public class UpdateHealthInfoCacheAction extends ActionType listener ) { - nodeHealthOverview.updateNodeHealth(request.getNodeId(), request.getDiskHealthInfo()); + nodeHealthOverview.updateNodeHealth(request.getNodeId(), request.getDiskHealthInfo(), request.getDslHealthInfo()); listener.onResponse(AcknowledgedResponse.of(true)); } } diff --git a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java index 7d7eb5c1a5697..8ca531b678c4a 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import 
org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; import org.elasticsearch.health.node.DiskHealthInfo; import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.health.node.HealthInfo; @@ -252,7 +253,7 @@ public void testThatIndicatorsGetHealthInfoData() throws Exception { randomAlphaOfLength(30), new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) ); - HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap); + HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); var service = new HealthService( // The preflight indicator does not get data because the data is not fetched until after the preflight check diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 6ec9acfb44c00..1584c4a57dd32 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -258,7 +258,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { diskInfoByNode.put(discoveryNode.getId(), new DiskHealthInfo(HealthStatus.GREEN)); } } - HealthInfo healthInfo = new HealthInfo(diskInfoByNode); + HealthInfo healthInfo = new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(HealthStatus.RED)); @@ -1021,7 +1021,7 @@ private HealthInfo createHealthInfo(List healthInfoConfigs) { diskInfoByNode.put(node.getId(), diskHealthInfo); } } - return new HealthInfo(diskInfoByNode); + return new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); } private 
static ClusterService createClusterService(Collection nodes, boolean withBlockedIndex) { diff --git a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java index 2200f59d3510b..f921c03686da4 100644 --- a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java @@ -35,6 +35,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; @@ -101,7 +102,7 @@ public void testAction() throws ExecutionException, InterruptedException { setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes)); HealthInfoCache healthInfoCache = getTestHealthInfoCache(); final FetchHealthInfoCacheAction.Response expectedResponse = new FetchHealthInfoCacheAction.Response( - new HealthInfo(healthInfoCache.getHealthInfo().diskInfoByNode()) + new HealthInfo(healthInfoCache.getHealthInfo().diskInfoByNode(), healthInfoCache.getHealthInfo().dslHealthInfo()) ); ActionTestUtils.execute( new FetchHealthInfoCacheAction.TransportAction( @@ -126,7 +127,8 @@ private HealthInfoCache getTestHealthInfoCache() { String nodeId = allNode.getId(); healthInfoCache.updateNodeHealth( nodeId, - new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) + new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())), + randomDslHealthInfo() ); } return healthInfoCache; @@ -134,7 +136,7 @@ private HealthInfoCache getTestHealthInfoCache() { public void 
testResponseSerialization() { FetchHealthInfoCacheAction.Response response = new FetchHealthInfoCacheAction.Response( - new HealthInfo(getTestHealthInfoCache().getHealthInfo().diskInfoByNode()) + new HealthInfo(getTestHealthInfoCache().getHealthInfo().diskInfoByNode(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS) ); EqualsHashCodeTestUtils.checkEqualsAndHashCode( response, @@ -150,6 +152,11 @@ private FetchHealthInfoCacheAction.Response mutateResponse(FetchHealthInfoCacheA randomAlphaOfLength(10), new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) ); - return new FetchHealthInfoCacheAction.Response(new HealthInfo(diskHealthInfoMapCopy)); + return new FetchHealthInfoCacheAction.Response( + new HealthInfo( + diskHealthInfoMapCopy, + randomValueOtherThan(originalResponse.getHealthInfo().dslHealthInfo(), HealthInfoTests::randomDslHealthInfo) + ) + ); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java index a278c9f17b330..fec3504d17218 100644 --- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java @@ -21,7 +21,9 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; @@ -43,20 +45,24 @@ public class HealthInfoCacheTests extends ESTestCase { public void testAddHealthInfo() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo(); + 
healthInfoCache.updateNodeHealth(node1.getId(), GREEN, latestDslHealthInfo); + healthInfoCache.updateNodeHealth(node2.getId(), RED, null); Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); - healthInfoCache.updateNodeHealth(node1.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), RED, null); assertThat(diskHealthInfo.get(node1.getId()), equalTo(GREEN)); assertThat(diskHealthInfo.get(node2.getId()), equalTo(RED)); + // dsl health info has not changed as a new value has not been reported + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(latestDslHealthInfo)); } public void testRemoveNodeFromTheCluster() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), GREEN, null); + DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo(); + healthInfoCache.updateNodeHealth(node2.getId(), RED, latestDslHealthInfo); ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); ClusterState current = ClusterStateCreationUtils.state(node1, node1, node1, new DiscoveryNode[] { node1 }); @@ -65,12 +71,15 @@ public void testRemoveNodeFromTheCluster() { Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); assertThat(diskHealthInfo.get(node1.getId()), equalTo(GREEN)); assertThat(diskHealthInfo.get(node2.getId()), nullValue()); + // the dsl info is not removed when the node that reported it leaves the cluster as the next DSL run will report it and + // override it (if the health node stops being the designated health node the health cache nullifies the existing DSL info) + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(latestDslHealthInfo)); } public void testNotAHealthNode() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); - 
healthInfoCache.updateNodeHealth(node1.getId(), GREEN); - healthInfoCache.updateNodeHealth(node2.getId(), RED); + healthInfoCache.updateNodeHealth(node1.getId(), GREEN, randomDslHealthInfo()); + healthInfoCache.updateNodeHealth(node2.getId(), RED, null); ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes); ClusterState current = ClusterStateCreationUtils.state(node1, node1, node2, allNodes); @@ -78,5 +87,6 @@ public void testNotAHealthNode() { Map diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode(); assertThat(diskHealthInfo.isEmpty(), equalTo(true)); + assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(nullValue())); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java index 371d19c9bb22d..c8ccda1c5b88d 100644 --- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java @@ -31,7 +31,7 @@ protected HealthInfo createTestInstance() { : new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())); diskInfoByNode.put(randomAlphaOfLengthBetween(10, 100), diskHealthInfo); } - return new HealthInfo(diskInfoByNode); + return new HealthInfo(diskInfoByNode, randomBoolean() ? 
randomDslHealthInfo() : null); } @Override @@ -67,6 +67,16 @@ public HealthInfo mutateInstance(HealthInfo originalHealthInfo) { default -> throw new IllegalStateException(); } } - return new HealthInfo(diskHealthInfoMapCopy); + return new HealthInfo( + diskHealthInfoMapCopy, + randomValueOtherThan(originalHealthInfo.dslHealthInfo(), HealthInfoTests::randomDslHealthInfo) + ); + } + + static DataStreamLifecycleHealthInfo randomDslHealthInfo() { + return new DataStreamLifecycleHealthInfo( + randomList(5, () -> new DslErrorInfo(randomAlphaOfLength(100), System.currentTimeMillis(), randomIntBetween(15, 500))), + randomIntBetween(6, 1000) + ); } } From 56170b4be737034d318b39ce89f29bd01e0e6c36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 5 Dec 2023 10:37:05 +0100 Subject: [PATCH 206/263] [Transform] Ensure transform `_schedule_now` API only triggers the expected transform task (#102958) --- docs/changelog/102958.yaml | 7 +++++++ .../transform/action/ScheduleNowTransformAction.java | 10 ++++++++++ .../action/ScheduleNowTransformActionRequestTests.java | 9 +++++++++ 3 files changed, 26 insertions(+) create mode 100644 docs/changelog/102958.yaml diff --git a/docs/changelog/102958.yaml b/docs/changelog/102958.yaml new file mode 100644 index 0000000000000..bb357c1eb09b5 --- /dev/null +++ b/docs/changelog/102958.yaml @@ -0,0 +1,7 @@ +pr: 102958 +summary: Ensure transform `_schedule_now` API only triggers the expected transform + task +area: Transform +type: bug +issues: + - 102956 diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java index f48e06a3f743c..6a50bd40517e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.transform.TransformField; @@ -94,6 +95,15 @@ public boolean equals(Object obj) { // the base class does not implement equals, therefore we need to check timeout ourselves return this.id.equals(other.id) && getTimeout().equals(other.getTimeout()); } + + @Override + public boolean match(Task task) { + if (task.getDescription().startsWith(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX)) { + String taskId = task.getDescription().substring(TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX.length()); + return taskId.equals(this.id); + } + return false; + } } public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java index e98e14e341cf7..80c415065e1bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/ScheduleNowTransformActionRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.test.AbstractWireSerializingTestCase; import 
org.elasticsearch.xpack.core.transform.action.ScheduleNowTransformAction.Request; @@ -55,4 +56,12 @@ public void testValidationFailure() { assertThat(e, is(notNullValue())); assertThat(e.validationErrors(), contains("_schedule_now API does not support _all wildcard")); } + + public void testMatch() { + Request request = new Request("my-transform-7", TimeValue.timeValueSeconds(5)); + assertTrue(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-7", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "data_frame_my-transform-77", null, null))); + assertFalse(request.match(new AllocatedPersistentTask(123, "", "", "my-transform-7", null, null))); + } } From 89faf4497e1afc71f5d58ce979c4b23efee91258 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 10:47:39 +0100 Subject: [PATCH 207/263] [DOCS] Add ES|QL 'getting started' code snippets to CSV tests (#102653) * [DOCS] Add ES|QL 'getting started' code snippets to CSV tests * Change dots in columns names into underscores * Add LIMIT 0 to ENRICH test * Move code snippets out of docs.csv-spec * Replace code snippets by includes * Add missing semicolon --- docs/reference/esql/esql-get-started.asciidoc | 70 ++++++------------- .../xpack/esql/CsvTestsDataLoader.java | 11 ++- .../src/main/resources/clientips.csv | 6 ++ .../src/main/resources/date.csv-spec | 42 +++++++++++ .../src/main/resources/dissect.csv-spec | 27 +++++++ .../src/main/resources/docs.csv-spec | 66 ++++++++++++++++- .../resources/enrich-policy-clientips.json | 7 ++ ...ages.json => enrich-policy-languages.json} | 0 .../src/main/resources/enrich.csv-spec | 27 +++++++ .../src/main/resources/eval.csv-spec | 22 ++++++ .../src/main/resources/mapping-clientips.json | 10 +++ .../main/resources/mapping-sample_data.json | 16 +++++ .../src/main/resources/sample_data.csv | 8 +++ 
.../src/main/resources/stats.csv-spec | 32 +++++++++ .../src/main/resources/where-like.csv-spec | 11 +++ .../src/main/resources/where.csv-spec | 11 +++ 16 files changed, 313 insertions(+), 53 deletions(-) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{enricy-policy-languages.json => enrich-policy-languages.json} (100%) create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index e54825406257f..4109d9d6f4ba3 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -39,7 +39,7 @@ This query returns up to 500 documents from the `sample_data` index: [source,esql] ---- -FROM sample_data +include::{esql-specs}/docs.csv-spec[tag=gs-from] ---- Each column corresponds to a field, and can be accessed by the name of that @@ -52,7 +52,7 @@ previous one: [source,esql] ---- -from sample_data +include::{esql-specs}/docs.csv-spec[tag=gs-from-lowercase] ---- ==== @@ -73,8 +73,7 @@ that are returned, up to a maximum of 10,000 rows: [source,esql] ---- -FROM sample_data -| LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-limit] ---- [TIP] @@ -84,7 +83,7 @@ have to. 
The following query is identical to the previous one: [source,esql] ---- -FROM sample_data | LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-limit-one-line] ---- ==== @@ -100,8 +99,7 @@ sort rows on one or more columns: [source,esql] ---- -FROM sample_data -| SORT @timestamp DESC +include::{esql-specs}/docs.csv-spec[tag=gs-sort] ---- [discrete] @@ -113,16 +111,14 @@ events with a duration longer than 5ms: [source,esql] ---- -FROM sample_data -| WHERE event.duration > 5000000 +include::{esql-specs}/where.csv-spec[tag=gs-where] ---- `WHERE` supports several <>. For example, you can use <> to run a wildcard query against the `message` column: [source,esql] ---- -FROM sample_data -| WHERE message LIKE "Connected*" +include::{esql-specs}/where-like.csv-spec[tag=gs-like] ---- [discrete] @@ -149,9 +145,7 @@ result set to 3 rows: [source,esql] ---- -FROM sample_data -| SORT @timestamp DESC -| LIMIT 3 +include::{esql-specs}/docs.csv-spec[tag=gs-chaining] ---- NOTE: The order of processing commands is important. First limiting the result @@ -169,8 +163,7 @@ other words: `event.duration` converted from nanoseconds to milliseconds. [source,esql] ---- -FROM sample_data -| EVAL duration_ms = event.duration / 1000000.0 +include::{esql-specs}/eval.csv-spec[tag=gs-eval] ---- `EVAL` supports several <>. 
For example, to round a @@ -179,8 +172,7 @@ number to the closest number with the specified number of digits, use the [source,esql] ---- -FROM sample_data -| EVAL duration_ms = ROUND(event.duration / 1000000.0, 1) +include::{esql-specs}/eval.csv-spec[tag=gs-round] ---- [discrete] @@ -193,16 +185,14 @@ example, the median duration: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration) +include::{esql-specs}/stats.csv-spec[tag=gs-stats] ---- You can calculate multiple stats with one command: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration), max_duration = MAX(event.duration) +include::{esql-specs}/stats.csv-spec[tag=gs-two-stats] ---- Use `BY` to group calculated stats by one or more columns. For example, to @@ -210,8 +200,7 @@ calculate the median duration per client IP: [source,esql] ---- -FROM sample_data -| STATS median_duration = MEDIAN(event.duration) BY client.ip +include::{esql-specs}/stats.csv-spec[tag=gs-stats-by] ---- [discrete] @@ -227,9 +216,7 @@ For example, to create hourly buckets for the data on October 23rd: [source,esql] ---- -FROM sample_data -| KEEP @timestamp -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket] ---- Combine `AUTO_BUCKET` with <> to create a histogram. 
For example, @@ -237,20 +224,14 @@ to count the number of events per hour: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, event.duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS COUNT(*) BY bucket +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by] ---- Or the median duration per hour: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, event.duration -| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") -| STATS median_duration = MEDIAN(event.duration) BY bucket +include::{esql-specs}/date.csv-spec[tag=gs-auto_bucket-stats-by-median] ---- [discrete] @@ -273,10 +254,7 @@ command: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, client.ip, event.duration -| EVAL client.ip = TO_STRING(client.ip) -| ENRICH clientip_policy ON client.ip WITH env +include::{esql-specs}/enrich.csv-spec[tag=gs-enrich] ---- You can use the new `env` column that's added by the `ENRICH` command in @@ -285,11 +263,7 @@ environment: [source,esql] ---- -FROM sample_data -| KEEP @timestamp, client.ip, event.duration -| EVAL client.ip = TO_STRING(client.ip) -| ENRICH clientip_policy ON client.ip WITH env -| STATS median_duration = MEDIAN(event.duration) BY env +include::{esql-specs}/enrich.csv-spec[tag=gs-enrich-stats-by] ---- For more about data enrichment with {esql}, refer to <>. 
@@ -321,8 +295,7 @@ string, you can use the following `DISSECT` command: [source,esql] ---- -FROM sample_data -| DISSECT message "Connected to %{server.ip}" +include::{esql-specs}/dissect.csv-spec[tag=gs-dissect] ---- This adds a `server.ip` column to those rows that have a `message` that matches @@ -334,10 +307,7 @@ has accepted: [source,esql] ---- -FROM sample_data -| WHERE STARTS_WITH(message, "Connected to") -| DISSECT message "Connected to %{server.ip}" -| STATS COUNT(*) BY server.ip +include::{esql-specs}/dissect.csv-spec[tag=gs-dissect-stats-by] ---- For more about data processing with {esql}, refer to diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index fd4600e5e64ff..3df70b3b83d37 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -52,6 +52,8 @@ public class CsvTestsDataLoader { private static final TestsDataset APPS = new TestsDataset("apps", "mapping-apps.json", "apps.csv"); private static final TestsDataset LANGUAGES = new TestsDataset("languages", "mapping-languages.json", "languages.csv"); private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs", "mapping-ul_logs.json", "ul_logs.csv"); + private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data", "mapping-sample_data.json", "sample_data.csv"); + private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips", "mapping-clientips.json", "clientips.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); @@ -66,15 +68,20 @@ 
public class CsvTestsDataLoader { LANGUAGES, UL_LOGS.indexName, UL_LOGS, + SAMPLE_DATA.indexName, + SAMPLE_DATA, + CLIENT_IPS.indexName, + CLIENT_IPS, AIRPORTS.indexName, AIRPORTS, AIRPORTS_WEB.indexName, AIRPORTS_WEB ); - private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enricy-policy-languages.json"); + private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); + private static final EnrichConfig CLIENT_IPS_ENRICH = new EnrichConfig("clientip_policy", "enrich-policy-clientips.json"); - public static final List ENRICH_POLICIES = List.of(LANGUAGES_ENRICH); + public static final List ENRICH_POLICIES = List.of(LANGUAGES_ENRICH, CLIENT_IPS_ENRICH); /** *

    diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv new file mode 100644 index 0000000000000..80918bc22d1fb --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/clientips.csv @@ -0,0 +1,6 @@ +client_ip:keyword,env:keyword +172.21.0.5,Development +172.21.2.113,QA +172.21.2.162,QA +172.21.3.15,Production +172.21.3.16,Production diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8b94c022aaf6a..f6c0666c54ed8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -725,3 +725,45 @@ birth_date:datetime 1952-02-27T00:00:00.000Z 1953-04-21T00:00:00.000Z ; + +docsGettingStartedAutoBucket +// tag::gs-auto_bucket[] +FROM sample_data +| KEEP @timestamp +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +// end::gs-auto_bucket[] +| LIMIT 0 +; + +@timestamp:date | bucket:date +; + +docsGettingStartedAutoBucketStatsBy +// tag::gs-auto_bucket-stats-by[] +FROM sample_data +| KEEP @timestamp, event_duration +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| STATS COUNT(*) BY bucket +// end::gs-auto_bucket-stats-by[] +| SORT bucket +; + +COUNT(*):long | bucket:date +2 |2023-10-23T12:00:00.000Z +5 |2023-10-23T13:00:00.000Z +; + +docsGettingStartedAutoBucketStatsByMedian +// tag::gs-auto_bucket-stats-by-median[] +FROM sample_data +| KEEP @timestamp, event_duration +| EVAL bucket = AUTO_BUCKET (@timestamp, 24, "2023-10-23T00:00:00Z", "2023-10-23T23:59:59Z") +| STATS median_duration = MEDIAN(event_duration) BY bucket +// end::gs-auto_bucket-stats-by-median[] +| SORT bucket +; + +median_duration:double | bucket:date +3107561.0 |2023-10-23T12:00:00.000Z +1756467.0 
|2023-10-23T13:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index 4c9c3a2681f50..1133b24cd1cf3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -159,6 +159,33 @@ emp_no:integer | a:keyword | b:keyword | c:keyword 10006 | [Principal, Senior] | [Support, Team] | [Engineer, Lead] ; +docsGettingStartedDissect +// tag::gs-dissect[] +FROM sample_data +| DISSECT message "Connected to %{server_ip}" +// end::gs-dissect[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | server_ip:keyword +; + +docsGettingStartedDissectStatsBy +// tag::gs-dissect-stats-by[] +FROM sample_data +| WHERE STARTS_WITH(message, "Connected to") +| DISSECT message "Connected to %{server_ip}" +| STATS COUNT(*) BY server_ip +// end::gs-dissect-stats-by[] +| SORT server_ip +; + +COUNT(*):long | server_ip:keyword +1 |10.1.0.1 +1 |10.1.0.2 +1 |10.1.0.3 +; + emptyPattern#[skip:-8.11.99] ROW a="b c d"| DISSECT a "%{b} %{} %{d}"; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index bbbfa287ea695..a754194739992 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -650,4 +650,68 @@ FROM employees first_name:keyword | last_name:keyword Alejandro |McAlpine // end::rlike-result[] -; \ No newline at end of file +; + +docsGettingStartedFrom +// tag::gs-from[] +FROM sample_data +// end::gs-from[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedFromLowercase +// tag::gs-from-lowercase[] +from sample_data +// end::gs-from-lowercase[] +| LIMIT 0 +; + +@timestamp:date | 
client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedLimit +// tag::gs-limit[] +FROM sample_data +| LIMIT 3 +// end::gs-limit[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedLimitOneLine +// tag::gs-limit-one-line[] +FROM sample_data | LIMIT 3 +// end::gs-limit-one-line[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedSort +// tag::gs-sort[] +FROM sample_data +| SORT @timestamp DESC +// end::gs-sort[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; + +docsGettingStartedChaining +// tag::gs-chaining[] +FROM sample_data +| SORT @timestamp DESC +| LIMIT 3 +// end::gs-chaining[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json new file mode 100644 index 0000000000000..2ca29a39c284d --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-clientips.json @@ -0,0 +1,7 @@ +{ + "match": { + "indices": "clientips", + "match_field": "client_ip", + "enrich_fields": ["env"] + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enricy-policy-languages.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-languages.json similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/enricy-policy-languages.json rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-policy-languages.json diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 796a7bceca55d..f5847260bbb16 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec 
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -3,3 +3,30 @@ from employees | eval x = 1, y = to_string(languages) | enrich languages_policy emp_no:integer | language_name:keyword ; + +docsGettingStartedEnrich +// tag::gs-enrich[] +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +// end::gs-enrich[] +| LIMIT 0 +; + +@timestamp:date | event_duration:long | client_ip:keyword | env:keyword +; + +docsGettingStartedEnrichStatsBy +// tag::gs-enrich-stats-by[] +FROM sample_data +| KEEP @timestamp, client_ip, event_duration +| EVAL client_ip = TO_STRING(client_ip) +| ENRICH clientip_policy ON client_ip WITH env +| STATS median_duration = MEDIAN(event_duration) BY env +// end::gs-enrich-stats-by[] +| LIMIT 0 +; + +median_duration:double | env:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index b29c8024950f9..7a5a90fb398eb 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -215,3 +215,25 @@ emp_no:integer | foldable:keyword | folded_mv:keyword 10001 | "foo,bar" | [foo, bar] 10002 | "foo,bar" | [foo, bar] ; + +docsGettingStartedEval +// tag::gs-eval[] +FROM sample_data +| EVAL duration_ms = event_duration / 1000000.0 +// end::gs-eval[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | duration_ms:double +; + +docsGettingStartedRound +// tag::gs-round[] +FROM sample_data +| EVAL duration_ms = ROUND(event_duration / 1000000.0, 1) +// end::gs-round[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword | duration_ms:double +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json new file mode 100644 index 0000000000000..39bd37ce26c7f --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-clientips.json @@ -0,0 +1,10 @@ +{ + "properties": { + "client_ip": { + "type": "keyword" + }, + "env": { + "type": "keyword" + } + } + } \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json new file mode 100644 index 0000000000000..838a8ba09b45a --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data.json @@ -0,0 +1,16 @@ +{ + "properties": { + "@timestamp": { + "type": "date" + }, + "client_ip": { + "type": "ip" + }, + "event_duration": { + "type": "long" + }, + "message": { + "type": "keyword" + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv new file mode 100644 index 0000000000000..3a62394014102 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data.csv @@ -0,0 +1,8 @@ +@timestamp:date,client_ip:ip,event_duration:long,message:keyword +2023-10-23T13:55:01.543Z,172.21.3.15,1756467,Connected to 10.1.0.1 +2023-10-23T13:53:55.832Z,172.21.3.15,5033755,Connection error +2023-10-23T13:52:55.015Z,172.21.3.15,8268153,Connection error +2023-10-23T13:51:54.732Z,172.21.3.15,725448,Connection error +2023-10-23T13:33:34.937Z,172.21.0.5,1232382,Disconnected +2023-10-23T12:27:28.948Z,172.21.2.113,2764889,Connected to 10.1.0.2 +2023-10-23T12:15:03.360Z,172.21.2.162,3450233,Connected to 10.1.0.3 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 6050dba6acf3b..dc96d1736858c 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -750,3 +750,35 @@ c:long | a:long 1 | 1 ; +docsGettingStartedStats +// tag::gs-stats[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) +// end::gs-stats[] +; + +median_duration:double +2764889.0 +; + +docsGettingStartedTwoStats +// tag::gs-two-stats[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration), max_duration = MAX(event_duration) +// end::gs-two-stats[] +; + +median_duration:double | max_duration:long +2764889.0 |8268153 +; + +docsGettingStartedStatsBy +// tag::gs-stats-by[] +FROM sample_data +| STATS median_duration = MEDIAN(event_duration) BY client_ip +// end::gs-stats-by[] +| LIMIT 0 +; + +median_duration:double | client_ip:ip +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec index 49bf62bf77db7..37a1978524e7f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec @@ -287,3 +287,14 @@ row x = "C:\\foo\\bar.exe" | mv_expand x | where x LIKE "C:\\\\\\\\*"; x:keyword ; + +docsGettingStartedLike +// tag::gs-like[] +FROM sample_data +| WHERE message LIKE "Connected*" +// end::gs-like[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec index 89f329bc6dcb9..4a76f6bca0310 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where.csv-spec @@ -76,3 +76,14 @@ emp_no:integer | first_name:keyword 10010 |Duangkaew 10011 |Mary ; + +docsGettingStartedWhere +// tag::gs-where[] +FROM 
sample_data +| WHERE event_duration > 5000000 +// end::gs-where[] +| LIMIT 0 +; + +@timestamp:date | client_ip:ip | event_duration:long | message:keyword +; From 50d6552e3814cb592177c4b7c3e30cfffc52d5c3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 5 Dec 2023 12:01:48 +0200 Subject: [PATCH 208/263] Error log when license verification fails locally (#102919) This change implements logging if the license verification fails on local nodes (after the the license has already been (erroneously) published in the cluster state). --- docs/changelog/102919.yaml | 5 ++ .../license/ClusterStateLicenseService.java | 88 +++++++++---------- .../license/LicenseVerifier.java | 24 ++++- 3 files changed, 70 insertions(+), 47 deletions(-) create mode 100644 docs/changelog/102919.yaml diff --git a/docs/changelog/102919.yaml b/docs/changelog/102919.yaml new file mode 100644 index 0000000000000..0de2e75abc6cf --- /dev/null +++ b/docs/changelog/102919.yaml @@ -0,0 +1,5 @@ +pr: 102919 +summary: Error log when license verification fails locally +area: License +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java index d4d62a75e98c7..d65fda90b87e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/ClusterStateLicenseService.java @@ -413,14 +413,6 @@ public void clusterChanged(ClusterChangedEvent event) { final ClusterState previousClusterState = event.previousState(); final ClusterState currentClusterState = event.state(); if (currentClusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { - if (XPackPlugin.isReadyForXPackCustomMetadata(currentClusterState) == false) { - logger.debug( - "cannot add license to cluster as the following nodes might not understand the 
license metadata: {}", - () -> XPackPlugin.nodesNotReadyForXPackCustomMetadata(currentClusterState) - ); - return; - } - final LicensesMetadata prevLicensesMetadata = previousClusterState.getMetadata().custom(LicensesMetadata.TYPE); final LicensesMetadata currentLicensesMetadata = currentClusterState.getMetadata().custom(LicensesMetadata.TYPE); // notify all interested plugins @@ -439,26 +431,7 @@ public void clusterChanged(ClusterChangedEvent event) { } else { logger.trace("license unchanged [{}]", currentLicensesMetadata); } - - License currentLicense = null; - boolean noLicenseInPrevMetadata = prevLicensesMetadata == null || prevLicensesMetadata.getLicense() == null; - if (noLicenseInPrevMetadata == false) { - currentLicense = prevLicensesMetadata.getLicense(); - } - boolean noLicenseInCurrentMetadata = (currentLicensesMetadata == null || currentLicensesMetadata.getLicense() == null); - if (noLicenseInCurrentMetadata == false) { - currentLicense = currentLicensesMetadata.getLicense(); - } - - boolean noLicense = noLicenseInPrevMetadata && noLicenseInCurrentMetadata; - // auto-generate license if no licenses ever existed or if the current license is basic and - // needs extended or if the license signature needs to be updated. 
this will trigger a subsequent cluster changed event - if (currentClusterState.getNodes().isLocalNodeElectedMaster() - && (noLicense - || LicenseUtils.licenseNeedsExtended(currentLicense) - || LicenseUtils.signatureNeedsUpdate(currentLicense, currentClusterState.nodes()))) { - registerOrUpdateSelfGeneratedLicense(); - } + maybeRegisterOrUpdateLicense(previousClusterState, currentClusterState); } else if (logger.isDebugEnabled()) { logger.debug("skipped license notifications reason: [{}]", GatewayService.STATE_NOT_RECOVERED_BLOCK); } @@ -468,24 +441,38 @@ private void updateXPackLicenseState(License license) { if (license == LicensesMetadata.LICENSE_TOMBSTONE) { // implies license has been explicitly deleted xPacklicenseState.update(LicenseUtils.getXPackLicenseStatus(license, clock)); - return; - } - checkForExpiredLicense(license); - } - - private boolean checkForExpiredLicense(License license) { - if (license != null) { + } else if (license != null) { XPackLicenseStatus xPackLicenseStatus = LicenseUtils.getXPackLicenseStatus(license, clock); xPacklicenseState.update(xPackLicenseStatus); if (xPackLicenseStatus.active()) { logger.debug("license [{}] - valid", license.uid()); - return false; } else { logger.warn("license [{}] - expired", license.uid()); - return true; } } - return false; + } + + private void maybeRegisterOrUpdateLicense(ClusterState previousClusterState, ClusterState currentClusterState) { + final LicensesMetadata prevLicensesMetadata = previousClusterState.getMetadata().custom(LicensesMetadata.TYPE); + final LicensesMetadata currentLicensesMetadata = currentClusterState.getMetadata().custom(LicensesMetadata.TYPE); + License currentLicense = null; + boolean noLicenseInPrevMetadata = prevLicensesMetadata == null || prevLicensesMetadata.getLicense() == null; + if (noLicenseInPrevMetadata == false) { + currentLicense = prevLicensesMetadata.getLicense(); + } + boolean noLicenseInCurrentMetadata = (currentLicensesMetadata == null || 
currentLicensesMetadata.getLicense() == null); + if (noLicenseInCurrentMetadata == false) { + currentLicense = currentLicensesMetadata.getLicense(); + } + boolean noLicense = noLicenseInPrevMetadata && noLicenseInCurrentMetadata; + // auto-generate license if no licenses ever existed or if the current license is basic and + // needs extended or if the license signature needs to be updated. this will trigger a subsequent cluster changed event + if (currentClusterState.getNodes().isLocalNodeElectedMaster() + && (noLicense + || LicenseUtils.licenseNeedsExtended(currentLicense) + || LicenseUtils.signatureNeedsUpdate(currentLicense, currentClusterState.nodes()))) { + registerOrUpdateSelfGeneratedLicense(); + } } /** @@ -496,12 +483,14 @@ private boolean checkForExpiredLicense(License license) { */ private void onUpdate(final LicensesMetadata currentLicensesMetadata) { final License license = getLicenseFromLicensesMetadata(currentLicensesMetadata); + // first update the XPackLicenseState + updateXPackLicenseState(license); // license can be null if the trial license is yet to be auto-generated // in this case, it is a no-op if (license != null) { - final License previousLicense = currentLicenseHolder.get(); + final License previousLicense = currentLicenseHolder.getAndSet(license); if (license.equals(previousLicense) == false) { - currentLicenseHolder.set(license); + // then register periodic job to update the XPackLicenseState with the latest expiration message scheduler.add(new SchedulerEngine.Job(LICENSE_JOB, nextLicenseCheck(license))); for (ExpirationCallback expirationCallback : expirationCallbacks) { scheduler.add( @@ -517,24 +506,25 @@ private void onUpdate(final LicensesMetadata currentLicensesMetadata) { } logger.info("license [{}] mode [{}] - valid", license.uid(), license.operationMode().name().toLowerCase(Locale.ROOT)); } - updateXPackLicenseState(license); } } // pkg private for tests SchedulerEngine.Schedule nextLicenseCheck(License license) { + final long 
licenseIssueDate = license.issueDate(); + final long licenseExpiryDate = LicenseUtils.getExpiryDate(license); return (startTime, time) -> { - if (time < license.issueDate()) { + if (time < licenseIssueDate) { // when we encounter a license with a future issue date // which can happen with autogenerated license, // we want to schedule a notification on the license issue date // so the license is notified once it is valid // see https://github.com/elastic/x-plugins/issues/983 - return license.issueDate(); - } else if (time < LicenseUtils.getExpiryDate(license)) { + return licenseIssueDate; + } else if (time < licenseExpiryDate) { // Re-check the license every day during the warning period up to the license expiration. // This will cause the warning message to be updated that is emitted on soon-expiring license use. - long nextTime = LicenseUtils.getExpiryDate(license) - LicenseSettings.LICENSE_EXPIRATION_WARNING_PERIOD.getMillis(); + long nextTime = licenseExpiryDate - LicenseSettings.LICENSE_EXPIRATION_WARNING_PERIOD.getMillis(); while (nextTime <= time) { nextTime += TimeValue.timeValueDays(1).getMillis(); } @@ -550,6 +540,7 @@ public License getLicense(final Metadata metadata) { } // visible for tests + @Nullable License getLicenseFromLicensesMetadata(@Nullable final LicensesMetadata metadata) { if (metadata != null) { License license = metadata.getLicense(); @@ -558,6 +549,13 @@ License getLicenseFromLicensesMetadata(@Nullable final LicensesMetadata metadata } else if (license != null) { if (license.verified()) { return license; + } else { + // this is an "error" level because an unverified license should not be present in the cluster state in the first place + logger.error( + "{} with uid [{}] failed verification on the local node.", + License.isAutoGeneratedLicense(license.signature()) ? 
"Autogenerated license" : "License", + license.uid() + ); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java index 1dff1aca29b3a..5c761c4811642 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java @@ -6,9 +6,12 @@ */ package org.elasticsearch.license; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.core.Streams; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,11 +31,15 @@ import java.util.Base64; import java.util.Collections; +import static org.elasticsearch.core.Strings.format; + /** * Responsible for verifying signed licenses */ public class LicenseVerifier { + private static final Logger logger = LogManager.getLogger(LicenseVerifier.class); + /** * verifies the license content with the signature using the packaged * public key @@ -65,7 +72,17 @@ public static boolean verifyLicense(final License license, PublicKey publicKey) while ((ref = iterator.next()) != null) { rsa.update(ref.bytes, ref.offset, ref.length); } - return rsa.verify(signedContent); + boolean verifyResult = rsa.verify(signedContent); + if (verifyResult == false) { + logger.warn( + () -> format( + "License with uid [%s] failed signature verification with the public key with sha256 [%s].", + license.uid(), + PUBLIC_KEY_DIGEST_HEX_STRING + ) + ); + } + return verifyResult; } catch (IOException | NoSuchAlgorithmException | SignatureException | InvalidKeyException e) { throw new IllegalStateException(e); } finally { @@ -76,12 +93,15 @@ 
public static boolean verifyLicense(final License license, PublicKey publicKey) } private static final PublicKey PUBLIC_KEY; + private static final String PUBLIC_KEY_DIGEST_HEX_STRING; static { try (InputStream is = LicenseVerifier.class.getResourceAsStream("/public.key")) { ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(is, out); - PUBLIC_KEY = CryptUtils.readPublicKey(out.toByteArray()); + byte[] publicKeyBytes = out.toByteArray(); + PUBLIC_KEY = CryptUtils.readPublicKey(publicKeyBytes); + PUBLIC_KEY_DIGEST_HEX_STRING = MessageDigests.toHexString(MessageDigests.sha256().digest(publicKeyBytes)); } catch (IOException e) { throw new AssertionError("key file is part of the source and must deserialize correctly", e); } From f64bb490550edb94715f1783203c46cc18653869 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 5 Dec 2023 12:23:34 +0200 Subject: [PATCH 209/263] Patterns support for allowed subjects by the JWT realm (#102426) This adds support for allowing JWT token sub claims with Lucene patterns and wildcards, by introducing a new JWT realm setting allowed_subject_patterns that can be used alongside the exist allowed_subjects realm setting. 
--- docs/changelog/102426.yaml | 5 + .../authentication/jwt-realm.asciidoc | 26 +- .../security/authc/jwt/JwtRealmSettings.java | 77 +++- .../xpack/security/authc/jwt/JwtRestIT.java | 10 +- .../security/authc/jwt/JwtAuthenticator.java | 40 +- .../authc/jwt/JwtStringClaimValidator.java | 97 +++-- .../JwtAuthenticatorAccessTokenTypeTests.java | 12 +- .../authc/jwt/JwtAuthenticatorTests.java | 241 +++++++++-- .../jwt/JwtStringClaimValidatorTests.java | 398 ++++++++++++++++-- 9 files changed, 797 insertions(+), 109 deletions(-) create mode 100644 docs/changelog/102426.yaml diff --git a/docs/changelog/102426.yaml b/docs/changelog/102426.yaml new file mode 100644 index 0000000000000..3aad50ed1eee0 --- /dev/null +++ b/docs/changelog/102426.yaml @@ -0,0 +1,5 @@ +pr: 102426 +summary: Patterns support for allowed subjects by the JWT realm +area: Authentication +type: feature +issues: [] diff --git a/docs/reference/security/authentication/jwt-realm.asciidoc b/docs/reference/security/authentication/jwt-realm.asciidoc index 68e20380449a5..4c9198956d21b 100644 --- a/docs/reference/security/authentication/jwt-realm.asciidoc +++ b/docs/reference/security/authentication/jwt-realm.asciidoc @@ -53,7 +53,8 @@ tokens can be issued by an OIDC Provider (OP), including ID Tokens. ID Tokens from an OIDC provider are well-defined JSON Web Tokens (JWT) and should be always compatible with a JWT realm of the `id_token` token type. The subject claim of an ID token represents the end-user. This means that ID tokens will generally have many allowed subjects. -Therefore, a JWT realm of `id_token` token type does _not_ mandate the `allowed_subjects` validation. +Therefore, a JWT realm of `id_token` token type does _not_ mandate the `allowed_subjects` +(or `allowed_subject_patterns`) validation. NOTE: Because JWTs are obtained external to {es}, you can define a custom workflow instead of using the OIDC workflow. 
However, the JWT format must still be JSON @@ -74,8 +75,8 @@ A typical usage of this flow is for an application to get a credential for itsel This is the use case that the `access_token` token type is designed for. It is likely that this application also obtains ID Tokens for its end-users. To prevent end-user ID Tokens being used to authenticate with the JWT realm configured -for the application, we mandate `allowed_subjects` validation when a JWT realm -has token type `access_token`. +for the application, we mandate `allowed_subjects` or `allowed_subject_patterns` +validation when a JWT realm has token type `access_token`. NOTE: Not every access token is formatted as a JSON Web Token (JWT). For it to be compatible with the JWT realm, it must at least use the JWT format and satisfies @@ -162,7 +163,8 @@ xpack.security.authc.realms.jwt.jwt2: token_type: access_token client_authentication.type: shared_secret allowed_issuer: "https://issuer.example.com/jwt/" - allowed_subjects: [ "123456-compute@developer.example.com" ] + allowed_subjects: [ "123456-compute@admin.example.com" ] + allowed_subject_patterns: [ "wild*@developer?.example.com", "/[a-z]+<1-10>\\@dev\\.example\\.com/"] allowed_audiences: [ "elasticsearch" ] required_claims: token_use: access @@ -181,7 +183,21 @@ Instructs the realm to treat and validate incoming JWTs as access tokens (`acces Specifies a list of JWT subjects that the realm will allow. These values are typically URLs, UUIDs, or other case-sensitive string values. -NOTE: This setting is mandatory for when `token_type` is `access_token`. +`allowed_subject_patterns`:: +Analogous to `allowed_subjects` but it accepts a list of <> +and wildcards for the allowed JWT subjects. 
Wildcards use the `*` and `?` special +characters (which are escaped by `\`) to mean "any string" and "any single character" +respectively, for example "a?\\**", matches "a1*" and "ab*whatever", but not "a", "abc", or "abc*" +(in Java strings `\` must itself be escaped by another `\`). +<> must be enclosed between `/`, +for example "/https?://[^/]+/?/" matches any http or https URL with no path component +(matches "https://elastic.co/" but not "https://elastic.co/guide"). + +NOTE: At least one of the `allowed_subjects` or `allowed_subject_patterns` settings must be specified +(and be non-empty) when `token_type` is `access_token`. + +NOTE: When both `allowed_subjects` and `allowed_subject_patterns` settings are specified +an incoming JWT's `sub` claim is accepted if it matches any of the two lists. `required_claims`:: Specifies a list of key/value pairs for additional verifications to be performed diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java index 1903dd5146f69..e75ff1ac321bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/jwt/JwtRealmSettings.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.RealmSettings; @@ -162,6 +163,7 @@ private static Set> getNonSecureSettings() { set.addAll( List.of( ALLOWED_SUBJECTS, + ALLOWED_SUBJECT_PATTERNS, FALLBACK_SUB_CLAIM, FALLBACK_AUD_CLAIM, REQUIRED_CLAIMS, @@ -255,11 +257,82 @@ private static 
Set> getSecureSettings() { ); // JWT end-user settings - public static final Setting.AffixSetting> ALLOWED_SUBJECTS = Setting.affixKeySetting( RealmSettings.realmSettingPrefix(TYPE), "allowed_subjects", - key -> Setting.stringListSetting(key, values -> verifyNonNullNotEmpty(key, values, null), Setting.Property.NodeScope) + key -> Setting.stringListSetting(key, new Setting.Validator<>() { + + @Override + public void validate(List allowedSubjects) { + // validate values themselves are not null or empty + allowedSubjects.forEach(allowedSubject -> verifyNonNullNotEmpty(key, allowedSubject, null)); + } + + @Override + @SuppressWarnings("unchecked") + public void validate(List allowedSubjects, Map, Object> settings) { + // validate both allowed_subjects and allowed_subject_patterns are not simultaneously empty (which is the default value) + final String namespace = ALLOWED_SUBJECTS.getNamespace(ALLOWED_SUBJECTS.getConcreteSetting(key)); + final List allowedSubjectPatterns = (List) settings.get( + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace) + ); + if (allowedSubjects.isEmpty() && allowedSubjectPatterns.isEmpty()) { + throw new SettingsException( + "One of either [" + + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace).getKey() + + "] or [" + + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace).getKey() + + "] must be specified and not be empty." 
+ ); + } + } + + @Override + public Iterator> settings() { + final String namespace = ALLOWED_SUBJECTS.getNamespace(ALLOWED_SUBJECTS.getConcreteSetting(key)); + final List> settings = List.of(ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace)); + return settings.iterator(); + } + }, Setting.Property.NodeScope) + ); + + public static final Setting.AffixSetting> ALLOWED_SUBJECT_PATTERNS = Setting.affixKeySetting( + RealmSettings.realmSettingPrefix(TYPE), + "allowed_subject_patterns", + key -> Setting.stringListSetting(key, new Setting.Validator<>() { + + @Override + public void validate(List allowedSubjectPatterns) { + // validate values themselves are not null or empty + allowedSubjectPatterns.forEach(allowedSubjectPattern -> verifyNonNullNotEmpty(key, allowedSubjectPattern, null)); + } + + @Override + @SuppressWarnings("unchecked") + public void validate(List allowedSubjectPatterns, Map, Object> settings) { + // validate both allowed_subjects and allowed_subject_patterns are not simultaneously empty (which is the default value) + final String namespace = ALLOWED_SUBJECT_PATTERNS.getNamespace(ALLOWED_SUBJECT_PATTERNS.getConcreteSetting(key)); + final List allowedSubjects = (List) settings.get( + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace) + ); + if (allowedSubjects.isEmpty() && allowedSubjectPatterns.isEmpty()) { + throw new SettingsException( + "One of either [" + + ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace).getKey() + + "] or [" + + ALLOWED_SUBJECT_PATTERNS.getConcreteSettingForNamespace(namespace).getKey() + + "] must be specified and not be empty." 
+ ); + } + } + + @Override + public Iterator> settings() { + final String namespace = ALLOWED_SUBJECT_PATTERNS.getNamespace(ALLOWED_SUBJECT_PATTERNS.getConcreteSetting(key)); + final List> settings = List.of(ALLOWED_SUBJECTS.getConcreteSettingForNamespace(namespace)); + return settings.iterator(); + } + }, Setting.Property.NodeScope) ); // Registered claim names from the JWT spec https://www.rfc-editor.org/rfc/rfc7519#section-4.1. diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java index db59bea999852..8f134a9d37502 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java @@ -149,7 +149,15 @@ private static Map realmSettings(LocalClusterSpec.LocalNodeSpec settings.put("xpack.security.authc.realms.jwt.jwt2.fallback_claims.sub", "email"); settings.put("xpack.security.authc.realms.jwt.jwt2.fallback_claims.aud", "scope"); settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_issuer", "my-issuer"); - settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subjects", SERVICE_SUBJECT.get()); + if (randomBoolean()) { + if (randomBoolean()) { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subjects", SERVICE_SUBJECT.get()); + } else { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subject_patterns", SERVICE_SUBJECT.get()); + } + } else { + settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_subject_patterns", "service_*@app?.example.com"); + } settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_audiences", "es01,es02,es03"); settings.put("xpack.security.authc.realms.jwt.jwt2.allowed_signature_algorithms", "HS256,HS384"); // Both email or sub 
works because of fallback diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java index 9c1deff9ed891..b06aba1c9d87a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticator.java @@ -15,6 +15,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.RealmConfig; @@ -127,18 +128,19 @@ private static List configureFieldValidatorsForIdToken(RealmC final Clock clock = Clock.systemUTC(); final JwtStringClaimValidator subjectClaimValidator; - if (realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECTS)) { - subjectClaimValidator = new JwtStringClaimValidator("sub", realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), true); + if (realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECTS) + || realmConfig.hasSetting(JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)) { + subjectClaimValidator = getSubjectClaimValidator(realmConfig, null); } else { - // Allow any value for the sub claim as long as there is a non-null value + // Allows any non-null value for the sub claim subjectClaimValidator = JwtStringClaimValidator.ALLOW_ALL_SUBJECTS; } return List.of( JwtTypeValidator.INSTANCE, - new JwtStringClaimValidator("iss", List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), true), + new JwtStringClaimValidator("iss", true, List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), List.of()), subjectClaimValidator, - new JwtStringClaimValidator("aud", 
realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), false), + new JwtStringClaimValidator("aud", false, realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), List.of()), new JwtAlgorithmValidator(realmConfig.getSetting(JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS)), new JwtDateClaimValidator(clock, "iat", allowedClockSkew, JwtDateClaimValidator.Relationship.BEFORE_NOW, false), new JwtDateClaimValidator(clock, "exp", allowedClockSkew, JwtDateClaimValidator.Relationship.AFTER_NOW, false), @@ -157,9 +159,15 @@ private static List configureFieldValidatorsForAccessToken( return List.of( JwtTypeValidator.INSTANCE, - new JwtStringClaimValidator("iss", List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), true), - new JwtStringClaimValidator("sub", fallbackClaimLookup, realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), true), - new JwtStringClaimValidator("aud", fallbackClaimLookup, realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), false), + new JwtStringClaimValidator("iss", true, List.of(realmConfig.getSetting(JwtRealmSettings.ALLOWED_ISSUER)), List.of()), + getSubjectClaimValidator(realmConfig, fallbackClaimLookup), + new JwtStringClaimValidator( + "aud", + false, + fallbackClaimLookup, + realmConfig.getSetting(JwtRealmSettings.ALLOWED_AUDIENCES), + List.of() + ), new JwtAlgorithmValidator(realmConfig.getSetting(JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS)), new JwtDateClaimValidator(clock, "iat", allowedClockSkew, JwtDateClaimValidator.Relationship.BEFORE_NOW, false), new JwtDateClaimValidator(clock, "exp", allowedClockSkew, JwtDateClaimValidator.Relationship.AFTER_NOW, false) @@ -170,7 +178,21 @@ private List getRequireClaimsValidators() { final Settings requiredClaims = realmConfig.getSetting(JwtRealmSettings.REQUIRED_CLAIMS); return requiredClaims.names().stream().map(name -> { final List allowedValues = requiredClaims.getAsList(name); - return new JwtStringClaimValidator(name, allowedValues, false); + return new 
JwtStringClaimValidator(name, false, allowedValues, List.of()); }).toList(); } + + private static JwtStringClaimValidator getSubjectClaimValidator( + RealmConfig realmConfig, + @Nullable Map fallbackClaimLookup + ) { + // validateAllowedSubjectsSettings(realmConfig); + return new JwtStringClaimValidator( + "sub", + true, + fallbackClaimLookup, + realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECTS), + realmConfig.getSetting(JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java index 30ea0979a624f..76a1b243f5277 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidator.java @@ -11,49 +11,73 @@ import com.nimbusds.jwt.JWTClaimsSet; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.core.security.support.Automatons; +import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; /** - * Validates a string claim against a list of allowed values. The validation is successful - * if the claim's value matches any of the allowed values. - * The claim's value can be either a single string or an array of strings. When it is an array - * of string, the validation passes when any member of the string array matches any of the allowed - * values. - * Whether a claim's value can be an array of strings is customised with the {@link #singleValuedClaim} - * field, which enforces the claim's value to be a single string if it is configured to {@code true}. 
- * - * NOTE the allowed values can be null which means skipping the actual value check, i.e. the validator - * succeeds as long as there is a (non-null) value. + * Validates a specific string claim form a {@link JWTClaimsSet} against both a list of explicit values and a list of Lucene patterns. + * The validation is successful if the claim's value matches any of the allowed values or patterns from the lists. + * The {@link JWTClaimsSet} claim value can either be a single string or an array of strings. + * The {@link JwtStringClaimValidator} can be configured to only accept a single string claim value + * (and reject string array claims) when the {@link #singleValuedClaim} field is set to {@code true}. + * When it is an array of string, the validation is successful when ANY array element matches ANY of the allowed values or patterns + * (and {@link #singleValuedClaim} field is {@code false}). */ public class JwtStringClaimValidator implements JwtFieldValidator { - public static JwtStringClaimValidator ALLOW_ALL_SUBJECTS = new JwtStringClaimValidator("sub", null, true); + // Allows any non-null value for the sub claim + public static final JwtStringClaimValidator ALLOW_ALL_SUBJECTS = new JwtStringClaimValidator("sub", true, List.of(), List.of("*")); private final String claimName; - @Nullable - private final Map fallbackClaimNames; - @Nullable - private final List allowedClaimValues; // Whether the claim should be a single string private final boolean singleValuedClaim; + @Nullable + private final Map fallbackClaimNames; + private final Predicate allowedClaimValuesPredicate; - public JwtStringClaimValidator(String claimName, List allowedClaimValues, boolean singleValuedClaim) { - this(claimName, null, allowedClaimValues, singleValuedClaim); + public JwtStringClaimValidator( + String claimName, + boolean singleValuedClaim, + Collection allowedClaimValues, + Collection allowedClaimValuePatterns + ) { + this(claimName, singleValuedClaim, null, allowedClaimValues, 
allowedClaimValuePatterns); } public JwtStringClaimValidator( String claimName, + boolean singleValuedClaim, Map fallbackClaimNames, - List allowedClaimValues, - boolean singleValuedClaim + Collection allowedClaimValues, + Collection allowedClaimValuePatterns ) { + assert allowedClaimValues != null : "allowed claim values should be empty rather than null"; + assert allowedClaimValuePatterns != null : "allowed claim value patterns should be empty rather than null"; this.claimName = claimName; - this.fallbackClaimNames = fallbackClaimNames; - this.allowedClaimValues = allowedClaimValues; this.singleValuedClaim = singleValuedClaim; + this.fallbackClaimNames = fallbackClaimNames; + this.allowedClaimValuesPredicate = new Predicate<>() { + private final Set allowedClaimsSet = new HashSet<>(allowedClaimValues); + private final Predicate allowedClaimPatternsPredicate = predicateFromPatterns(claimName, allowedClaimValuePatterns); + + @Override + public boolean test(String s) { + return allowedClaimsSet.contains(s) || allowedClaimPatternsPredicate.test(s); + } + + @Override + public String toString() { + return "[" + Strings.collectionToCommaDelimitedString(allowedClaimsSet) + "] || [" + allowedClaimPatternsPredicate + "]"; + } + }; } @Override @@ -63,18 +87,19 @@ public void validate(JWSHeader jwsHeader, JWTClaimsSet jwtClaimsSet) { if (claimValues == null) { throw new IllegalArgumentException("missing required string claim [" + fallbackableClaim + "]"); } - - if (allowedClaimValues != null && false == claimValues.stream().anyMatch(allowedClaimValues::contains)) { - throw new IllegalArgumentException( - "string claim [" - + fallbackableClaim - + "] has value [" - + Strings.collectionToCommaDelimitedString(claimValues) - + "] which does not match allowed claim values [" - + Strings.collectionToCommaDelimitedString(allowedClaimValues) - + "]" - ); + for (String claimValue : claimValues) { + if (allowedClaimValuesPredicate.test(claimValue)) { + return; + } } + throw new 
IllegalArgumentException( + "string claim [" + + fallbackableClaim + + "] has value [" + + Strings.collectionToCommaDelimitedString(claimValues) + + "] which does not match allowed claim values " + + allowedClaimValuesPredicate + ); } private List getStringClaimValues(FallbackableClaim fallbackableClaim) { @@ -85,4 +110,12 @@ private List getStringClaimValues(FallbackableClaim fallbackableClaim) { return fallbackableClaim.getStringListClaimValue(); } } + + private static Predicate predicateFromPatterns(String claimName, Collection patterns) { + try { + return Automatons.predicate(patterns); + } catch (Exception e) { + throw new SettingsException("Invalid patterns for allowed claim values for [" + claimName + "].", e); + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java index 40bf021a48a9c..b1bb03b95dd8c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java @@ -30,11 +30,17 @@ public void testSubjectIsRequired() throws ParseException { public void testAccessTokenTypeMandatesAllowedSubjects() { allowedSubject = null; + allowedSubjectPattern = null; final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildJwtAuthenticator()); - assertThat( - e.getMessage(), - containsString("Invalid empty list for [" + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + "]") + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, 
JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) ); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java index dd1a984a0dcb5..7a44ebae95738 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.jwt.JwtAuthenticationToken; import org.elasticsearch.xpack.core.security.authc.jwt.JwtRealmSettings; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.Before; import java.text.ParseException; @@ -32,10 +34,8 @@ import java.time.temporal.ChronoUnit; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; -import static org.elasticsearch.test.TestMatchers.throwableWithMessage; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.mockito.ArgumentMatchers.any; @@ -51,6 +51,7 @@ public abstract class JwtAuthenticatorTests extends ESTestCase { protected String allowedIssuer; @Nullable protected String allowedSubject; + protected String allowedSubjectPattern; protected String allowedAudience; 
protected String fallbackSub; protected String fallbackAud; @@ -64,11 +65,19 @@ public void beforeTest() { allowedIssuer = randomAlphaOfLength(6); allowedAlgorithm = randomFrom(JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS_HMAC); if (getTokenType() == JwtRealmSettings.TokenType.ID_TOKEN) { + // allowedSubject and allowedSubjectPattern can both be null for allowedSubject = randomBoolean() ? randomAlphaOfLength(8) : null; + allowedSubjectPattern = randomBoolean() ? randomAlphaOfLength(8) : null; fallbackSub = null; fallbackAud = null; } else { - allowedSubject = randomAlphaOfLength(8); + if (randomBoolean()) { + allowedSubject = randomAlphaOfLength(8); + allowedSubjectPattern = randomBoolean() ? randomAlphaOfLength(8) : null; + } else { + allowedSubject = randomBoolean() ? randomAlphaOfLength(8) : null; + allowedSubjectPattern = randomAlphaOfLength(8); + } fallbackSub = randomBoolean() ? "_" + randomAlphaOfLength(5) : null; fallbackAud = randomBoolean() ? "_" + randomAlphaOfLength(8) : null; } @@ -84,7 +93,7 @@ public void testRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, requiredClaim.v1(), @@ -122,7 +131,7 @@ public void testMismatchedRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? 
randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, requiredClaim.v1(), @@ -155,10 +164,9 @@ public void testMismatchedRequiredClaims() throws ParseException { + "] has value [" + mismatchRequiredClaimValue + "] which does not match allowed claim values [" - + requiredClaim.v2().stream().collect(Collectors.joining(",")) - + "]" ) ); + requiredClaim.v2().stream().forEach(requiredClaim -> { assertThat(e.getMessage(), containsString(requiredClaim)); }); } public void testMissingRequiredClaims() throws ParseException { @@ -168,7 +176,7 @@ public void testMissingRequiredClaims() throws ParseException { "iss", allowedIssuer, "sub", - allowedSubject == null ? randomAlphaOfLengthBetween(10, 18) : allowedSubject, + getValidSubClaimValue(), "aud", allowedAudience, "iat", @@ -233,14 +241,188 @@ protected void doTestInvalidIssuerIsCheckedBeforeAlgorithm(JwtAuthenticator jwtA final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet); assertThat( - e, - throwableWithMessage( - "string claim [iss] has value [" + invalidIssuer + "] which does not match allowed claim values [" + allowedIssuer + "]" + e.getMessage(), + containsString( + "string claim [iss] has value [" + + invalidIssuer + + "] which does not match allowed claim " + + "values [" + + allowedIssuer + + "]" + ) + ); + } + + public void testInvalidAllowedSubjectClaimPattern() { + allowedSubjectPattern = "/invalid pattern"; + final SettingsException e = expectThrows(SettingsException.class, () -> buildJwtAuthenticator()); + assertThat(e.getMessage(), containsString("Invalid patterns for allowed claim values for [sub].")); + } + + public void testEmptyAllowedSubjectIsInvalid() { + allowedSubject = null; + allowedSubjectPattern = null; + RealmConfig someJWTRealmConfig = buildJWTRealmConfig(); + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + boolean emptySubjects = 
randomBoolean(); + if (emptySubjects) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of("")); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of("")); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} ) ); + if (emptySubjects) { + assertThat( + e.getMessage(), + containsString( + "Invalid empty value for [" + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + "]." + ) + ); + } else { + assertThat( + e.getMessage(), + containsString( + "Invalid empty value for [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "]." + ) + ); + } + } + + public void testNoAllowedSubjectInvalidSettings() { + allowedSubject = null; + allowedSubjectPattern = null; + RealmConfig someJWTRealmConfig = buildJWTRealmConfig(); + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of()); + } else { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS)); + } + if (randomBoolean()) { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of()); + } else { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + 
someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) + ); + } + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS)); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), List.of()); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." 
+ ) + ); + } + { + final Settings.Builder builder = Settings.builder(); + builder.put(someJWTRealmConfig.settings()); + if (randomBoolean()) { + builder.putNull(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)); + } else { + builder.putList(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), List.of()); + } + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new JwtAuthenticator( + new RealmConfig( + someJWTRealmConfig.identifier(), + builder.build(), + someJWTRealmConfig.env(), + someJWTRealmConfig.threadContext() + ), + mock(SSLService.class), + () -> {} + ) + ); + assertThat( + e.getCause().getMessage(), + containsString( + "One of either [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS) + + "] or [" + + RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS) + + "] must be specified and not be empty." + ) + ); + } } protected JwtAuthenticator buildJwtAuthenticator() { + final RealmConfig realmConfig = buildJWTRealmConfig(); + final JwtAuthenticator jwtAuthenticator = spy(new JwtAuthenticator(realmConfig, null, () -> {})); + // Short circuit signature validation to be always successful since this test class does not test it + doAnswer(invocation -> { + final ActionListener listener = invocation.getArgument(2); + listener.onResponse(null); + return null; + }).when(jwtAuthenticator).validateSignature(any(), any(), anyActionListener()); + return jwtAuthenticator; + } + + protected RealmConfig buildJWTRealmConfig() { final RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier(JwtRealmSettings.TYPE, realmName); final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HMAC_KEY), randomAlphaOfLength(40)); @@ -251,11 +433,12 @@ protected JwtAuthenticator buildJwtAuthenticator() { 
.put(RealmSettings.getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), randomIntBetween(0, 99)) .put("path.home", randomAlphaOfLength(10)) .setSecureSettings(secureSettings); - if (allowedSubject != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS), allowedSubject); } - + if (allowedSubjectPattern != null) { + builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS), allowedSubjectPattern); + } if (getTokenType() == JwtRealmSettings.TokenType.ID_TOKEN) { if (randomBoolean()) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), "id_token"); @@ -263,14 +446,12 @@ protected JwtAuthenticator buildJwtAuthenticator() { } else { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), "access_token"); } - if (fallbackSub != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM), fallbackSub); } if (fallbackAud != null) { builder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM), fallbackAud); } - if (requiredClaim != null) { final String requiredClaimsKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.REQUIRED_CLAIMS) + requiredClaim .v1(); @@ -280,24 +461,20 @@ protected JwtAuthenticator buildJwtAuthenticator() { builder.putList(requiredClaimsKey, requiredClaim.v2()); } } - final Settings settings = builder.build(); + return new RealmConfig(realmIdentifier, settings, TestEnvironment.newEnvironment(settings), new ThreadContext(settings)); + } - final RealmConfig realmConfig = new RealmConfig( - realmIdentifier, - settings, - TestEnvironment.newEnvironment(settings), - new ThreadContext(settings) - ); - - final JwtAuthenticator jwtAuthenticator = spy(new JwtAuthenticator(realmConfig, null, () -> {})); - // Short circuit signature validation to be always successful since this test class does not test it - 
doAnswer(invocation -> { - final ActionListener listener = invocation.getArgument(2); - listener.onResponse(null); - return null; - }).when(jwtAuthenticator).validateSignature(any(), any(), anyActionListener()); - - return jwtAuthenticator; + private String getValidSubClaimValue() { + if (allowedSubject == null && allowedSubjectPattern == null) { + // any subject is valid + return randomAlphaOfLengthBetween(10, 18); + } else if (allowedSubject == null) { + return allowedSubjectPattern; + } else if (allowedSubjectPattern == null) { + return allowedSubject; + } else { + return randomFrom(allowedSubject, allowedSubjectPattern); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java index 41e666db39a5f..c59c1e19e0ee1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtStringClaimValidatorTests.java @@ -10,10 +10,12 @@ import com.nimbusds.jose.JWSHeader; import com.nimbusds.jwt.JWTClaimsSet; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import java.text.ParseException; import java.util.List; +import java.util.Locale; import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; @@ -28,11 +30,11 @@ public void testClaimIsNotString() throws ParseException { final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), List.of()); // fallback claim is ignored jwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, List.of(42), fallbackClaimName, 
randomAlphaOfLength(8))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), Map.of(claimName, fallbackClaimName), List.of(), List.of()); jwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, List.of(42))); } @@ -46,16 +48,16 @@ public void testClaimIsNotString() throws ParseException { public void testClaimIsNotSingleValued() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), true); + validator = new JwtStringClaimValidator(claimName, true, List.of(), List.of()); // fallback claim is ignored jwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, List.of("foo", "bar"), fallbackClaimName, randomAlphaOfLength(8))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), true); + validator = new JwtStringClaimValidator(claimName, true, Map.of(claimName, fallbackClaimName), List.of(), List.of()); jwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, List.of("foo", "bar"))); } @@ -69,14 +71,14 @@ public void testClaimIsNotSingleValued() throws ParseException { public void testClaimDoesNotExist() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final JwtStringClaimValidator validator; final JWTClaimsSet jwtClaimsSet; if (randomBoolean()) { - validator = new JwtStringClaimValidator(claimName, List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), 
List.of()); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), Map.of(claimName, fallbackClaimName), List.of(), List.of()); } jwtClaimsSet = JWTClaimsSet.parse(Map.of()); @@ -89,7 +91,7 @@ public void testClaimDoesNotExist() throws ParseException { public void testMatchingClaimValues() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); final String claimValue = randomAlphaOfLength(10); final boolean singleValuedClaim = randomBoolean(); final List allowedClaimValues = List.of(claimValue, randomAlphaOfLengthBetween(11, 20)); @@ -99,11 +101,17 @@ public void testMatchingClaimValues() throws ParseException { final JWTClaimsSet validJwtClaimsSet; final boolean noFallback = randomBoolean(); if (noFallback) { - validator = new JwtStringClaimValidator(claimName, allowedClaimValues, singleValuedClaim); + validator = new JwtStringClaimValidator(claimName, singleValuedClaim, allowedClaimValues, List.of()); // fallback claim is ignored validJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, incomingClaimValue, fallbackClaimName, List.of(42))); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), allowedClaimValues, singleValuedClaim); + validator = new JwtStringClaimValidator( + claimName, + singleValuedClaim, + Map.of(claimName, fallbackClaimName), + allowedClaimValues, + List.of() + ); validJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, incomingClaimValue)); } @@ -113,35 +121,128 @@ public void testMatchingClaimValues() throws ParseException { throw new AssertionError("validation should have passed without exception", e); } - final JWTClaimsSet invalidJwtClaimsSet; - if (noFallback) { - // fallback is ignored (even when 
it has a valid value) since the main claim exists - invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, "not-" + claimValue, fallbackClaimName, claimValue)); + String invalidClaimValue; + if (randomBoolean()) { + invalidClaimValue = "not-" + claimValue; } else { - invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "not-" + claimValue)); + // letter case mismatch: invert case at pos i + int i = randomIntBetween(0, claimValue.length() - 1); + invalidClaimValue = claimValue.substring(0, i); + if (Character.isUpperCase(claimValue.charAt(i))) { + invalidClaimValue += claimValue.substring(i, i).toLowerCase(Locale.ROOT); + } else if (Character.isLowerCase(claimValue.charAt(i))) { + invalidClaimValue += claimValue.substring(i, i).toUpperCase(Locale.ROOT); + } else { + throw new AssertionError("Unrecognized case"); + } + invalidClaimValue += claimValue.substring(i + 1); } + { + final JWTClaimsSet invalidJwtClaimsSet; + if (noFallback) { + // fallback is ignored (even when it has a valid value) since the main claim exists + invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, invalidClaimValue, fallbackClaimName, claimValue)); + } else { + invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, invalidClaimValue)); + } - final IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) - ); - assertThat(e.getMessage(), containsString("does not match allowed claim values")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } } - public void testDoesNotSupportWildcardOrRegex() throws ParseException { + public void testWildcardAndRegexMatchingClaimValues() throws ParseException { final String claimName = randomAlphaOfLengthBetween(10, 18); - final String 
fallbackClaimName = randomAlphaOfLength(12); + final String fallbackClaimName = randomAlphaOfLength(8); + final List allowedClaimPatterns = List.of("a?\\**", "/https?://[^/]+/?/"); + + final boolean noFallback = randomBoolean(); + final JwtStringClaimValidator validator; + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, false, List.of(), allowedClaimPatterns); + } else { + validator = new JwtStringClaimValidator( + claimName, + false, + Map.of(claimName, fallbackClaimName), + List.of(), + allowedClaimPatterns + ); + } + for (String incomingClaimValue : List.of("a1*", "ab*whatever", "https://elastic.co/")) { + final JWTClaimsSet validJwtClaimsSet; + if (noFallback) { + // fallback claim is ignored + validJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + claimName, + randomBoolean() ? incomingClaimValue : List.of(incomingClaimValue, "other_stuff"), + fallbackClaimName, + List.of(42) + ) + ); + } else { + validJwtClaimsSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, randomBoolean() ? incomingClaimValue : List.of(incomingClaimValue, "other_stuff")) + ); + } + try { + validator.validate(getJwsHeader(), validJwtClaimsSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } + for (String invalidIncomingClaimValue : List.of("a", "abc", "abc*", "https://elastic.co/guide")) { + final JWTClaimsSet invalidJwtClaimsSet; + if (noFallback) { + // fallback claim is ignored + invalidJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + claimName, + randomBoolean() ? invalidIncomingClaimValue : List.of(invalidIncomingClaimValue, "other_stuff"), + fallbackClaimName, + List.of(42) + ) + ); + } else { + invalidJwtClaimsSet = JWTClaimsSet.parse( + Map.of( + fallbackClaimName, + randomBoolean() ? 
invalidIncomingClaimValue : List.of(invalidIncomingClaimValue, "other_stuff") + ) + ); + } + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidJwtClaimsSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + + public void testValueAllowSettingDoesNotSupportWildcardOrRegex() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); final String claimValue = randomFrom("*", "/.*/"); final JwtStringClaimValidator validator; final JWTClaimsSet invalidJwtClaimsSet; final boolean noFallback = randomBoolean(); if (noFallback) { - validator = new JwtStringClaimValidator(claimName, List.of(claimValue), randomBoolean()); + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(claimValue), List.of()); // fallback is ignored (even when it has a valid value) since the main claim exists invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, randomAlphaOfLengthBetween(1, 10), fallbackClaimName, claimValue)); } else { - validator = new JwtStringClaimValidator(claimName, Map.of(claimName, fallbackClaimName), List.of(claimValue), randomBoolean()); + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(claimValue), + List.of() + ); invalidJwtClaimsSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, randomAlphaOfLengthBetween(1, 10))); } @@ -167,6 +268,253 @@ public void testDoesNotSupportWildcardOrRegex() throws ParseException { } } + public void testSinglePatternSingleClaim() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final String claimPattern = randomFrom("a*", "/a.*/"); + final JwtStringClaimValidator validator; + final JWTClaimsSet singleValueClaimSet; + final boolean 
noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), List.of(claimPattern)); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of(claimName, "a_claim", fallbackClaimName, randomFrom(List.of("invalid", "invalid2"), "invalid"), "something", "else") + ); + } else { + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(), + List.of(claimPattern) + ); + singleValueClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "a_fallback_claim", "something", "else")); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of(claimName, "invalid", fallbackClaimName, randomFrom(List.of("a_claim", "a_claim2"), "a_claim"), "something", "else") + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "invalid", "something", "else")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testPatternListSingleClaim() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final List claimPatterns = List.of("a*", "/b.*b/"); + final JwtStringClaimValidator validator; + final 
JWTClaimsSet singleValueClaimSet; + final boolean noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, randomBoolean(), List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + "b_claim_b", + fallbackClaimName, + randomFrom(List.of("invalid", "invalid2"), "invalid"), + "something", + "else" + ) + ); + } else { + validator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + Map.of(claimName, fallbackClaimName), + List.of(), + claimPatterns + ); + singleValueClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "b_fallback_claim_b", "something", "else")); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + "invalid", + fallbackClaimName, + randomFrom(List.of("b_claim_b", "b_claim2_b"), "b_claim_b"), + "something", + "else" + ) + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse(Map.of(fallbackClaimName, "invalid", "something", "else")); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testPatternListClaimList() throws ParseException { + final String claimName = randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomAlphaOfLength(8); + final List claimPatterns = 
List.of("a*", "/b.*b/"); + final JwtStringClaimValidator validator; + final JWTClaimsSet singleValueClaimSet; + final boolean noFallback = randomBoolean(); + if (noFallback) { + validator = new JwtStringClaimValidator(claimName, false, List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + List.of("invalid", "b_claim_b"), + fallbackClaimName, + randomFrom(List.of("invalid", "invalid2"), "invalid"), + "something", + "else" + ) + ); + } else { + validator = new JwtStringClaimValidator(claimName, false, Map.of(claimName, fallbackClaimName), List.of(), claimPatterns); + singleValueClaimSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, List.of("invalid", "b_fallback_claim_b"), "something", "else") + ); + } + try { + validator.validate(getJwsHeader(), singleValueClaimSet); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of( + claimName, + List.of("invalid", "invalid2"), + fallbackClaimName, + randomFrom(List.of("b_claim_b", "a_claim"), "b_claim_b"), + "something", + "else" + ) + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + { + JWTClaimsSet invalidClaimSet = JWTClaimsSet.parse( + Map.of(fallbackClaimName, List.of("invalid", "invalid2"), "something", "else") + ); + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> validator.validate(getJwsHeader(), invalidClaimSet) + ); + if (noFallback) { + assertThat(e.getMessage(), containsString("missing required string claim")); + } else { + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + } + + public void testBothPatternAndSimpleValue() { + final String claimName = 
randomAlphaOfLengthBetween(10, 18); + final String fallbackClaimName = randomFrom(randomAlphaOfLength(8), null); + final List claimPatterns = List.of("a*", "/.*Z.*/", "*b"); + final List claimValues = List.of("c", "dd", "eZe"); + final JwtStringClaimValidator singleValueValidator = new JwtStringClaimValidator( + claimName, + randomBoolean(), + fallbackClaimName == null ? null : Map.of(claimName, fallbackClaimName), + claimValues, + claimPatterns + ); + for (String claimValue : List.of("a_claim", "anotZer_claim", "Z", "claim_b", "c", "dd", "eZe")) { + if (fallbackClaimName != null) { + try { + singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(fallbackClaimName, claimValue, "something", "else")) + ); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } else { + try { + singleValueValidator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, claimValue, "something", "else"))); + } catch (Exception e) { + throw new AssertionError("validation should have passed without exception", e); + } + } + } + for (String invalidClaimValue : List.of("invalid", "cc", "ca", "dda", "ba")) { + IllegalArgumentException e; + if (fallbackClaimName != null) { + e = expectThrows( + IllegalArgumentException.class, + () -> singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(fallbackClaimName, invalidClaimValue, "something", "else")) + ) + ); + } else { + e = expectThrows( + IllegalArgumentException.class, + () -> singleValueValidator.validate( + getJwsHeader(), + JWTClaimsSet.parse(Map.of(claimName, invalidClaimValue, "something", "else")) + ) + ); + } + assertThat(e.getMessage(), containsString("does not match allowed claim values")); + } + } + + public void testInvalidPatternThrows() { + String claimName = randomAlphaOfLength(4); + SettingsException e = expectThrows( + SettingsException.class, + () -> new JwtStringClaimValidator( + claimName, + randomBoolean(), + 
randomBoolean() ? null : Map.of(randomAlphaOfLength(4), randomAlphaOfLength(8)), + randomBoolean() ? List.of() : List.of("dummy"), + List.of("/invalid pattern") + ) + ); + assertThat(e.getMessage(), containsString("Invalid patterns for allowed claim values for [" + claimName + "].")); + } + public void testAllowAllSubjects() { try { JwtStringClaimValidator.ALLOW_ALL_SUBJECTS.validate( From ad9cfcf2e4f2d162417ed63ce2aae5d929cbf57b Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 5 Dec 2023 10:30:30 +0000 Subject: [PATCH 210/263] Remove version field from CachedBlob (#102706) The field isn't actually used, but we need to keep it around for BwC for now --- .../cache/blob/BlobStoreCacheService.java | 2 - .../cache/blob/CachedBlob.java | 45 ++++--------------- 2 files changed, 9 insertions(+), 38 deletions(-) diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java index 448e1e02d889e..a7ba0294d5c98 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheService.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.IndexFileNames; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.get.GetRequest; @@ -245,7 +244,6 @@ public final void putAsync( try { final CachedBlob cachedBlob = new CachedBlob( Instant.ofEpochMilli(timeInEpochMillis), - Version.CURRENT, repository, name, generatePath(snapshotId, indexId, shardId), diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java index 91d2900553444..aba553e563c3e 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/CachedBlob.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -23,18 +24,17 @@ public class CachedBlob implements ToXContent { /** * Sentinel {@link CachedBlob} indicating that searching the cache index returned an error. */ - public static final CachedBlob CACHE_NOT_READY = new CachedBlob(null, null, null, "CACHE_NOT_READY", null, BytesArray.EMPTY, 0L, 0L); + public static final CachedBlob CACHE_NOT_READY = new CachedBlob(null, null, "CACHE_NOT_READY", null, BytesArray.EMPTY, 0L, 0L); /** * Sentinel {@link CachedBlob} indicating that the cache index definitely did not contain the requested data. 
*/ - public static final CachedBlob CACHE_MISS = new CachedBlob(null, null, null, "CACHE_MISS", null, BytesArray.EMPTY, 0L, 0L); + public static final CachedBlob CACHE_MISS = new CachedBlob(null, null, "CACHE_MISS", null, BytesArray.EMPTY, 0L, 0L); private static final String TYPE = "blob"; public static final String CREATION_TIME_FIELD = "creation_time"; private final Instant creationTime; - private final Version version; private final String repository; private final String name; private final String path; @@ -43,30 +43,12 @@ public class CachedBlob implements ToXContent { private final long from; private final long to; - public CachedBlob( - Instant creationTime, - Version version, - String repository, - String name, - String path, - BytesReference content, - long offset - ) { - this(creationTime, version, repository, name, path, content, offset, offset + (content == null ? 0 : content.length())); + public CachedBlob(Instant creationTime, String repository, String name, String path, BytesReference content, long offset) { + this(creationTime, repository, name, path, content, offset, offset + (content == null ? 
0 : content.length())); } - private CachedBlob( - Instant creationTime, - Version version, - String repository, - String name, - String path, - BytesReference content, - long from, - long to - ) { + private CachedBlob(Instant creationTime, String repository, String name, String path, BytesReference content, long from, long to) { this.creationTime = creationTime; - this.version = version; this.repository = repository; this.name = name; this.path = path; @@ -78,11 +60,13 @@ private CachedBlob( @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + @UpdateForV9 // we can remove the version field when we no longer need to keep compatibility with <8.12 + final int version = Version.CURRENT.id; builder.startObject(); { builder.field("type", TYPE); builder.field(CREATION_TIME_FIELD, creationTime.toEpochMilli()); - builder.field("version", version.id); + builder.field("version", version); builder.field("repository", repository); builder.startObject("blob"); { @@ -118,10 +102,6 @@ public BytesReference bytes() { return bytes; } - public Version version() { - return version; - } - public Instant creationTime() { return creationTime; } @@ -132,10 +112,6 @@ public static CachedBlob fromSource(final Map source) { if (creationTimeEpochMillis == null) { throw new IllegalStateException("cached blob document does not have the [creation_time] field"); } - final Version version = Version.fromId((Integer) source.get("version")); - if (version == null) { - throw new IllegalStateException("cached blob document does not have the [version] field"); - } final String repository = (String) source.get("repository"); if (repository == null) { throw new IllegalStateException("cached blob document does not have the [repository] field"); @@ -179,7 +155,6 @@ public static CachedBlob fromSource(final Map source) { // TODO add exhaustive verifications (from/to/content.length, version supported, id == recomputed id etc) return new CachedBlob( 
Instant.ofEpochMilli(creationTimeEpochMillis), - version, repository, name, path, @@ -194,8 +169,6 @@ public String toString() { return "CachedBlob [" + "creationTime=" + creationTime - + ", version=" - + version + ", repository='" + repository + '\'' From 8d0551ecb965a209ab6c9b6afe59d601d94c80ee Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Tue, 5 Dec 2023 11:50:50 +0100 Subject: [PATCH 211/263] ESQL: emit warnings from single-value functions processing multi-values (#102417) When encountering a multi-value, a single-value function (i.e. all non-`mv_xxx()`) returns a `null`. This behaviour is opaque to the user. This PR adds the functionality for these functions to emit a `Warning` header, so the user is informed about the cause for the `null`s. Within testing, there are some differences between the emulated CSV-based tests (`TestPhysical*`) and the REST CSV-tests and thus the exact messages in the warnings: * The REST ones can push operations to Lucene; when this happens, a query containing a negation, `not `, can be translated to a `must_not` query, that will include the `not` in the `Source`. But outside of Lucene, the execution would consider the predicate first, then the negation. So when the predicate contains a SV function, only this part's `Source` will show up in the warning. * When pushed to Lucene, a query is wrapped within the `SingleValueQuery`. This emits now warnings when encountering MVs (and returning no match). However, this only happens once the query that it wraps returns something itself. Comparatively, the `TestPhysical*` filters will issue a warning for every encountered MV (irrespective of sigle values within the MV matching or not). To differentiate between the slightly differing values of the warnings, one can now append the `#[Emulated:` prefix to a warning, followed by the value of the warning for the emulated checks, then a corresponding `]`. 
Example: `warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.]` Closes #98743. --- docs/changelog/102417.yaml | 6 + .../esql/multivalued-fields.asciidoc | 8 + .../org/elasticsearch/TransportVersions.java | 1 + .../compute/gen/EvaluatorImplementer.java | 42 ++--- .../org/elasticsearch/compute/gen/Types.java | 5 - .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 2 +- .../src/main/resources/boolean.csv-spec | 6 +- .../src/main/resources/eval.csv-spec | 8 +- .../src/main/resources/floats.csv-spec | 32 +++- .../src/main/resources/ints.csv-spec | 36 +++-- .../src/main/resources/ip.csv-spec | 52 +++++-- .../src/main/resources/string.csv-spec | 54 +++++-- .../src/main/resources/unsigned_long.csv-spec | 34 +++- .../comparison/EqualsBoolsEvaluator.java | 33 +++- .../comparison/EqualsDoublesEvaluator.java | 33 +++- .../comparison/EqualsIntsEvaluator.java | 33 +++- .../comparison/EqualsKeywordsEvaluator.java | 33 +++- .../comparison/EqualsLongsEvaluator.java | 33 +++- .../GreaterThanDoublesEvaluator.java | 33 +++- .../comparison/GreaterThanIntsEvaluator.java | 33 +++- .../GreaterThanKeywordsEvaluator.java | 33 +++- .../comparison/GreaterThanLongsEvaluator.java | 33 +++- .../GreaterThanOrEqualDoublesEvaluator.java | 33 +++- .../GreaterThanOrEqualIntsEvaluator.java | 33 +++- .../GreaterThanOrEqualKeywordsEvaluator.java | 33 +++- .../GreaterThanOrEqualLongsEvaluator.java | 33 +++- .../comparison/LessThanDoublesEvaluator.java | 33 +++- .../comparison/LessThanIntsEvaluator.java | 33 +++- .../comparison/LessThanKeywordsEvaluator.java | 33 +++- .../comparison/LessThanLongsEvaluator.java | 33 +++- .../LessThanOrEqualDoublesEvaluator.java | 33 +++- .../LessThanOrEqualIntsEvaluator.java | 33 +++- .../LessThanOrEqualKeywordsEvaluator.java | 33 +++- .../LessThanOrEqualLongsEvaluator.java | 33 +++- 
.../comparison/NotEqualsBoolsEvaluator.java | 33 +++- .../comparison/NotEqualsDoublesEvaluator.java | 33 +++- .../comparison/NotEqualsIntsEvaluator.java | 33 +++- .../NotEqualsKeywordsEvaluator.java | 33 +++- .../comparison/NotEqualsLongsEvaluator.java | 33 +++- .../operator/logical/NotEvaluator.java | 25 ++- .../operator/regex/RegexMatchEvaluator.java | 27 +++- .../conditional/GreatestBooleanEvaluator.java | 24 ++- .../GreatestBytesRefEvaluator.java | 24 ++- .../conditional/GreatestDoubleEvaluator.java | 24 ++- .../conditional/GreatestIntEvaluator.java | 24 ++- .../conditional/GreatestLongEvaluator.java | 24 ++- .../conditional/LeastBooleanEvaluator.java | 24 ++- .../conditional/LeastBytesRefEvaluator.java | 24 ++- .../conditional/LeastDoubleEvaluator.java | 24 ++- .../scalar/conditional/LeastIntEvaluator.java | 25 ++- .../conditional/LeastLongEvaluator.java | 24 ++- .../date/DateExtractConstantEvaluator.java | 26 +++- .../scalar/date/DateExtractEvaluator.java | 18 ++- .../date/DateFormatConstantEvaluator.java | 27 +++- .../scalar/date/DateFormatEvaluator.java | 33 +++- .../date/DateParseConstantEvaluator.java | 9 +- .../scalar/date/DateParseEvaluator.java | 18 ++- .../scalar/date/DateTruncEvaluator.java | 27 +++- .../function/scalar/date/NowEvaluator.java | 14 +- .../scalar/ip/CIDRMatchEvaluator.java | 33 +++- .../scalar/math/AbsDoubleEvaluator.java | 24 ++- .../function/scalar/math/AbsIntEvaluator.java | 25 ++- .../scalar/math/AbsLongEvaluator.java | 25 ++- .../function/scalar/math/AcosEvaluator.java | 10 +- .../function/scalar/math/AsinEvaluator.java | 10 +- .../function/scalar/math/Atan2Evaluator.java | 35 ++++- .../function/scalar/math/AtanEvaluator.java | 25 ++- .../scalar/math/CastIntToDoubleEvaluator.java | 25 ++- .../scalar/math/CastIntToLongEvaluator.java | 25 ++- .../math/CastIntToUnsignedLongEvaluator.java | 24 ++- .../math/CastLongToDoubleEvaluator.java | 24 ++- .../math/CastLongToUnsignedLongEvaluator.java | 24 ++- 
.../CastUnsignedLongToDoubleEvaluator.java | 24 ++- .../scalar/math/CeilDoubleEvaluator.java | 25 ++- .../function/scalar/math/CosEvaluator.java | 25 ++- .../function/scalar/math/CoshEvaluator.java | 10 +- .../scalar/math/FloorDoubleEvaluator.java | 25 ++- .../scalar/math/IsFiniteEvaluator.java | 25 ++- .../scalar/math/IsInfiniteEvaluator.java | 25 ++- .../function/scalar/math/IsNaNEvaluator.java | 25 ++- .../scalar/math/Log10DoubleEvaluator.java | 10 +- .../scalar/math/Log10IntEvaluator.java | 10 +- .../scalar/math/Log10LongEvaluator.java | 10 +- .../math/Log10UnsignedLongEvaluator.java | 10 +- .../function/scalar/math/PowEvaluator.java | 19 ++- .../function/scalar/math/PowIntEvaluator.java | 146 ++++++++++++++++++ .../scalar/math/PowLongEvaluator.java | 146 ++++++++++++++++++ .../scalar/math/RoundDoubleEvaluator.java | 33 +++- .../math/RoundDoubleNoDecimalsEvaluator.java | 24 ++- .../scalar/math/RoundIntEvaluator.java | 33 +++- .../scalar/math/RoundLongEvaluator.java | 33 +++- .../math/RoundUnsignedLongEvaluator.java | 33 +++- .../function/scalar/math/SinEvaluator.java | 25 ++- .../function/scalar/math/SinhEvaluator.java | 10 +- .../scalar/math/SqrtDoubleEvaluator.java | 10 +- .../scalar/math/SqrtIntEvaluator.java | 10 +- .../scalar/math/SqrtLongEvaluator.java | 10 +- .../math/SqrtUnsignedLongEvaluator.java | 24 ++- .../function/scalar/math/TanEvaluator.java | 25 ++- .../function/scalar/math/TanhEvaluator.java | 25 ++- .../scalar/string/ConcatEvaluator.java | 26 +++- .../scalar/string/EndsWithEvaluator.java | 33 +++- .../scalar/string/LTrimEvaluator.java | 25 ++- .../function/scalar/string/LeftEvaluator.java | 33 +++- .../scalar/string/LengthEvaluator.java | 25 ++- .../scalar/string/RTrimEvaluator.java | 25 ++- .../string/ReplaceConstantEvaluator.java | 19 ++- .../scalar/string/ReplaceEvaluator.java | 28 +++- .../scalar/string/RightEvaluator.java | 33 +++- .../string/SplitSingleByteEvaluator.java | 24 ++- .../scalar/string/SplitVariableEvaluator.java | 33 +++- 
.../scalar/string/StartsWithEvaluator.java | 33 +++- .../scalar/string/SubstringEvaluator.java | 42 ++++- .../string/SubstringNoLengthEvaluator.java | 33 +++- .../function/scalar/string/TrimEvaluator.java | 25 ++- .../arithmetic/AddDatetimesEvaluator.java | 10 +- .../arithmetic/AddDoublesEvaluator.java | 33 +++- .../operator/arithmetic/AddIntsEvaluator.java | 19 ++- .../arithmetic/AddLongsEvaluator.java | 19 ++- .../arithmetic/AddUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/DivDoublesEvaluator.java | 33 +++- .../operator/arithmetic/DivIntsEvaluator.java | 19 ++- .../arithmetic/DivLongsEvaluator.java | 19 ++- .../arithmetic/DivUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/ModDoublesEvaluator.java | 33 +++- .../operator/arithmetic/ModIntsEvaluator.java | 19 ++- .../arithmetic/ModLongsEvaluator.java | 19 ++- .../arithmetic/ModUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/MulDoublesEvaluator.java | 33 +++- .../operator/arithmetic/MulIntsEvaluator.java | 19 ++- .../arithmetic/MulLongsEvaluator.java | 19 ++- .../arithmetic/MulUnsignedLongsEvaluator.java | 19 ++- .../arithmetic/NegDoublesEvaluator.java | 25 ++- .../operator/arithmetic/NegIntsEvaluator.java | 10 +- .../arithmetic/NegLongsEvaluator.java | 10 +- .../arithmetic/SubDatetimesEvaluator.java | 10 +- .../arithmetic/SubDoublesEvaluator.java | 33 +++- .../operator/arithmetic/SubIntsEvaluator.java | 19 ++- .../arithmetic/SubLongsEvaluator.java | 19 ++- .../arithmetic/SubUnsignedLongsEvaluator.java | 19 ++- .../xpack/esql/evaluator/EvalMapper.java | 1 + .../operator/comparison/ComparisonMapper.java | 52 +++---- .../predicate/operator/regex/RegexMapper.java | 3 +- .../function/scalar/conditional/Greatest.java | 10 +- .../function/scalar/conditional/Least.java | 10 +- .../function/scalar/date/DateExtract.java | 2 +- .../function/scalar/date/DateFormat.java | 5 +- .../function/scalar/date/DateTrunc.java | 10 +- .../expression/function/scalar/date/Now.java | 2 +- .../function/scalar/ip/CIDRMatch.java | 
1 + .../expression/function/scalar/math/Abs.java | 6 +- .../math/AbstractTrigonometricFunction.java | 2 +- .../expression/function/scalar/math/Atan.java | 2 +- .../function/scalar/math/Atan2.java | 6 +- .../function/scalar/math/AutoBucket.java | 6 +- .../expression/function/scalar/math/Cast.java | 15 +- .../expression/function/scalar/math/Ceil.java | 2 +- .../expression/function/scalar/math/Cos.java | 2 +- .../function/scalar/math/Floor.java | 2 +- .../function/scalar/math/IsFinite.java | 2 +- .../function/scalar/math/IsInfinite.java | 2 +- .../function/scalar/math/IsNaN.java | 2 +- .../expression/function/scalar/math/Pow.java | 4 +- .../function/scalar/math/Round.java | 19 ++- .../expression/function/scalar/math/Sin.java | 2 +- .../expression/function/scalar/math/Sqrt.java | 2 +- .../expression/function/scalar/math/Tan.java | 2 +- .../expression/function/scalar/math/Tanh.java | 2 +- .../function/scalar/string/Concat.java | 2 +- .../function/scalar/string/EndsWith.java | 2 +- .../function/scalar/string/LTrim.java | 2 +- .../function/scalar/string/Left.java | 1 + .../function/scalar/string/Length.java | 2 +- .../function/scalar/string/RTrim.java | 2 +- .../function/scalar/string/Right.java | 1 + .../function/scalar/string/Split.java | 4 +- .../function/scalar/string/StartsWith.java | 2 +- .../function/scalar/string/Substring.java | 4 +- .../function/scalar/string/Trim.java | 2 +- .../predicate/operator/arithmetic/Add.java | 2 +- .../predicate/operator/arithmetic/Div.java | 2 +- .../arithmetic/EsqlArithmeticOperation.java | 4 +- .../predicate/operator/arithmetic/Mod.java | 2 +- .../predicate/operator/arithmetic/Mul.java | 2 +- .../predicate/operator/arithmetic/Neg.java | 2 +- .../predicate/operator/arithmetic/Sub.java | 2 +- .../xpack/esql/io/stream/PlanStreamInput.java | 44 +----- .../esql/io/stream/PlanStreamOutput.java | 6 +- .../esql/querydsl/query/SingleValueQuery.java | 129 ++++++++++++---- .../elasticsearch/xpack/esql/CsvTests.java | 2 +- 
.../function/AbstractFunctionTestCase.java | 9 ++ .../function/scalar/math/RoundTests.java | 3 +- .../operator/arithmetic/AddTests.java | 3 +- .../operator/arithmetic/SubTests.java | 3 +- .../LocalPhysicalPlanOptimizerTests.java | 19 ++- .../xpack/esql/planner/FilterTests.java | 71 +++++---- .../SingleValueQuerySerializationTests.java | 9 +- .../querydsl/query/SingleValueQueryTests.java | 68 ++++---- .../elasticsearch/xpack/ql/tree/Source.java | 1 + .../xpack/ql/util/SourceUtils.java | 93 +++++++++++ .../elasticsearch/xpack/ql/CsvSpecReader.java | 35 ++++- 201 files changed, 3626 insertions(+), 785 deletions(-) create mode 100644 docs/changelog/102417.yaml create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java diff --git a/docs/changelog/102417.yaml b/docs/changelog/102417.yaml new file mode 100644 index 0000000000000..09c1a4f49dbfd --- /dev/null +++ b/docs/changelog/102417.yaml @@ -0,0 +1,6 @@ +pr: 102417 +summary: "ESQL: emit warnings from single-value functions processing multi-values" +area: ES|QL +type: feature +issues: + - 98743 diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 5e48eb4ef8af8..6cb7755b91ce9 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -180,12 +180,20 @@ POST /mv/_bulk?refresh { "a": 1, "b": [2, 1] } { "index" : {} } { "a": 2, "b": 3 } +---- +[source,console] +---- POST /_query { "query": "FROM mv | EVAL b + 2, a + b | LIMIT 4" } ---- +// TEST[continued] +// TEST[warning:Line 1:16: evaluation of [b + 2] failed, treating result as null. Only first 20 failures recorded.] 
+// TEST[warning:Line 1:16: java.lang.IllegalArgumentException: single-value function encountered multi-value] +// TEST[warning:Line 1:23: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded.] +// TEST[warning:Line 1:23: java.lang.IllegalArgumentException: single-value function encountered multi-value] [source,console-result] ---- diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 7c3568986dccd..30769371f3608 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -187,6 +187,7 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_USAGE_ADDED = def(8_554_00_0); public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); + public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 1b44e0d274e32..f283e3b59bb63 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -86,12 +86,9 @@ private TypeSpec type() { builder.addJavadoc("This class is generated. 
Do not edit it."); builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); - builder.addType(factory()); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); - } + builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); processFunction.args.stream().forEach(a -> a.declareField(builder)); builder.addField(DRIVER_CONTEXT, "driverContext", Modifier.PRIVATE, Modifier.FINAL); @@ -108,10 +105,8 @@ private TypeSpec type() { private MethodSpec ctor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addParameter(SOURCE, "source"); - builder.addStatement("this.warnings = new Warnings(source)"); - } + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.warnings = new Warnings(source)"); processFunction.args.stream().forEach(a -> a.implementCtor(builder)); builder.addParameter(DRIVER_CONTEXT, "driverContext"); @@ -217,8 +212,23 @@ private MethodSpec realEval(boolean blockStyle) { } private static void skipNull(MethodSpec.Builder builder, String value) { - builder.beginControlFlow("if ($N.isNull(p) || $N.getValueCount(p) != 1)", value, value); + builder.beginControlFlow("if ($N.isNull(p))", value); + { + builder.addStatement("result.appendNull()"); + builder.addStatement("continue position"); + } + builder.endControlFlow(); + builder.beginControlFlow("if ($N.getValueCount(p) != 1)", value); { + builder.beginControlFlow("if ($N.getValueCount(p) > 1)", value); + { + builder.addStatement( + // TODO: reflection on SingleValueQuery.MULTI_VALUE_WARNING? 
+ "warnings.registerException(new $T(\"single-value function encountered multi-value\"))", + IllegalArgumentException.class + ); + } + builder.endControlFlow(); builder.addStatement("result.appendNull()"); builder.addStatement("continue position"); } @@ -259,9 +269,7 @@ private TypeSpec factory() { builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY); builder.addModifiers(Modifier.STATIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); - } + builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); processFunction.args.stream().forEach(a -> a.declareFactoryField(builder)); builder.addMethod(factoryCtor()); @@ -273,10 +281,8 @@ private TypeSpec factory() { private MethodSpec factoryCtor() { MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); - if (processFunction.warnExceptions.isEmpty() == false) { - builder.addParameter(SOURCE, "source"); - builder.addStatement("this.source = source"); - } + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.source = source"); processFunction.args.stream().forEach(a -> a.implementFactoryCtor(builder)); return builder.build(); @@ -289,9 +295,7 @@ private MethodSpec factoryGet() { builder.returns(implementation); List args = new ArrayList<>(); - if (processFunction.warnExceptions.isEmpty() == false) { - args.add("source"); - } + args.add("source"); for (ProcessFunctionArg arg : processFunction.args) { String invocation = arg.factoryInvocation(builder); if (invocation != null) { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 1a09160dae3cd..e0533c68afd18 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ 
b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -35,7 +35,6 @@ public class Types { static final ClassName VECTOR = ClassName.get(DATA_PACKAGE, "Vector"); static final ClassName BIG_ARRAYS = ClassName.get("org.elasticsearch.common.util", "BigArrays"); - static final ClassName BYTES_REF_ARRAY = ClassName.get("org.elasticsearch.common.util", "BytesRefArray"); static final ClassName BOOLEAN_BLOCK = ClassName.get(DATA_PACKAGE, "BooleanBlock"); static final ClassName BYTES_REF_BLOCK = ClassName.get(DATA_PACKAGE, "BytesRefBlock"); @@ -86,10 +85,6 @@ public class Types { static final ClassName LONG_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantLongVector"); static final ClassName DOUBLE_CONSTANT_VECTOR = ClassName.get(DATA_PACKAGE, "ConstantDoubleVector"); - static final ClassName INT_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "IntArrayState"); - static final ClassName LONG_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "LongArrayState"); - static final ClassName DOUBLE_ARRAY_STATE = ClassName.get(AGGREGATION_PACKAGE, "DoubleArrayState"); - static final ClassName AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunction"); static final ClassName AGGREGATOR_FUNCTION_SUPPLIER = ClassName.get(AGGREGATION_PACKAGE, "AggregatorFunctionSupplier"); static final ClassName GROUPING_AGGREGATOR_FUNCTION = ClassName.get(AGGREGATION_PACKAGE, "GroupingAggregatorFunction"); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index af3a6804f2220..734f26fab547a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -105,7 +105,7 @@ protected void shouldSkipTest(String testName) { 
protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); - Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings); + Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings(false)); var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults); var metadata = answer.get("columns"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 7426f07042962..3d9f9aa6e1c27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -62,9 +62,13 @@ avg(salary):double | always_false:boolean ; -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true +warning:Line 1:63: evaluation of [is_rehired in (still_hired, true)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:63: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:105: evaluation of [is_rehired != still_hired] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:105: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |is_rehired:boolean |still_hired:boolean 10021 |true |false 10029 |true |false diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 7a5a90fb398eb..e6486960c7e04 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -200,8 +200,14 @@ Chirstian. |Chirstian.Koblick|Chirstian.KoblickChirstian.|Chirstian Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; -roundArrays +roundArrays#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); +warning:Line 1:88: evaluation of [round([1.2, 4.6])] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:88: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:133: evaluation of [round([1.14], [1, 2])] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:133: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:56: java.lang.IllegalArgumentException: single-value function encountered multi-value a:double | b:double | c:double | d: double | e:double | f:double | g:double | h:double 1.2 | [2.4, 7.9] | 1.0 | null | 1.0 | null | 1.1 | null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index bef977b8eea98..f56266f868d44 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -91,8 +91,10 @@ int:integer |dbl:double 520128 |520128 ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -103,8 +105,10 @@ emp_no:integer |salary_change:double 10030 |-0.4 ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -115,16 +119,20 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double 10001 |1.19 ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change:double @@ -132,8 +140,10 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; +warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change:double @@ -141,8 +151,10 @@ emp_no:integer |salary_change:double 10079 |7.58 ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -153,8 +165,10 @@ emp_no:integer |salary_change:double 10079 | 7.58 ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change:double @@ -165,8 +179,10 @@ emp_no:integer |salary_change:double 10030 | -0.4 ; -notEqualToMultivalue +notEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change == 1.19)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 3e28c8bc2cb9b..887d931f4cd5c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,9 @@ // Integral types-specific tests -inLongAndInt +inLongAndInt#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; +warning:Line 1:24: evaluation of [avg_worked_seconds in (372957040, salary_change.long, 236703986)] failed, treating 
result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |avg_worked_seconds:long 10017 |236703986 @@ -246,8 +248,10 @@ d:double |d2i:integer |overflow:integer 123.4 |123 |null ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -258,8 +262,10 @@ emp_no:integer |salary_change.int:integer 10030 | 0 ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -270,8 +276,10 @@ emp_no:integer |salary_change.int:integer 10086 |13 ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int == 0] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -281,8 +289,10 @@ emp_no:integer |salary_change.int:integer 10093 | 0 ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change.int:integer @@ -290,8 +300,10 @@ emp_no:integer |salary_change.int:integer 10044 |8 ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; +warning:Line 1:24: evaluation of [salary_change.int in (1, 7)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued salaries are filtered out emp_no:integer |salary_change.int:integer @@ -299,8 +311,10 @@ emp_no:integer |salary_change.int:integer 10079 |7 ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change.int < 1)] failed, treating result as null. 
Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -311,8 +325,10 @@ emp_no:integer |salary_change.int:integer 10079 | 7 ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change.int > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't less than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer @@ -323,8 +339,10 @@ emp_no:integer |salary_change.int:integer 10020 | -5 ; -notEqualToMultivalue +notEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; +warning:Line 1:24: evaluation of [not(salary_change.int == 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int == 1] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued salaries aren't greater than 1 - they are null - so they aren't included emp_no:integer |salary_change.int:integer diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index f17af749cc85f..02e9db6ededf1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -15,8 +15,10 @@ eth1 |epsilon |null eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -equals +equals#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; +warning:Line 1:38: evaluation of [ip0 == ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |alpha |127.0.0.1 |127.0.0.1 @@ -55,8 +57,10 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0]|[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -lessThen +lessThan#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card | where ip0 < ip1 | keep card, host, ip0, ip1; +warning:Line 1:38: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 @@ -64,8 +68,10 @@ eth1 |beta |127.0.0.1 |128.0.0.1 lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:feb9 ; -notEquals +notEquals#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; +warning:Line 1:43: evaluation of [ip0 != ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:43: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |beta |127.0.0.1 |::1 @@ -112,9 +118,11 @@ ip0:ip |ip1:ip null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; -conditional +conditional#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true +warning:Line 1:27: evaluation of [ip0==ip1] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value eq:ip |ip0:ip |ip1:ip 127.0.0.1 |127.0.0.1 |127.0.0.1 @@ -129,9 +137,13 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0]|[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true +warning:Line 1:27: evaluation of [ip0==ip1] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 1:55: evaluation of [eq in (ip0, ip1)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:55: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip |eq:ip eth0 |alpha |127.0.0.1 |127.0.0.1 |127.0.0.1 @@ -144,16 +156,20 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9 eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1|fe80::cae2:65ff:fece:fec1 ; -cidrMatchSimple +cidrMatchSimple#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; +warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 ; -cidrMatchNullField +cidrMatchNullField#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip0, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece:fec0, fe80::cae2:65ff:fece:fec1]|fe80::cae2:65ff:fece:fec1 @@ -161,27 +177,33 @@ eth1 |epsilon |null eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; -cdirMatchMultipleArgs +cdirMatchMultipleArgs#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\", \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; -cidrMatchFunctionArg +cidrMatchFunctionArg#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [cidr_match(ip1, concat(\"127.0.0.2\", \"/32\"), \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; -cidrMatchFieldArg +cidrMatchFieldArg#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:44: evaluation of [cidr_match(ip1, cidr, \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:44: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 @@ -215,15 +237,19 @@ str1:keyword |str2:keyword |ip1:ip |ip2:ip pushDownIP from hosts | where ip1 == to_ip("::1") | keep card, host, ip0, ip1; ignoreOrder:true +warning:#[Emulated:Line 1:20: evaluation of [ip1 == to_ip(\"::1\")] failed, treating result as null. Only first 20 failures recorded.] +warning:#[Emulated:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value] card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithIn +pushDownIPWithIn#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [ip1 in (to_ip(\"::1\"), to_ip(\"127.0.0.1\"))] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth0 |alpha |127.0.0.1 |127.0.0.1 @@ -231,9 +257,11 @@ eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithComparision +pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true +warning:Line 1:20: evaluation of [ip1 > to_ip(\"127.0.0.1\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |ip1:ip eth1 |127.0.0.2 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 2d1db44eea7be..1f78a63c8c4d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -299,9 +299,11 @@ emp_no:integer | name:keyword ; // Note: no matches in MV returned -in +in#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true +warning:Line 1:24: evaluation of [job_positions in (\"Internship\", first_name)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value emp_no:integer |job_positions:keyword 10048 |Internship @@ -461,8 +463,10 @@ emp_no:integer |positions:keyword 10005 |null |null ; -lessThanMultivalue +lessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -470,8 +474,10 @@ emp_no:integer |job_positions:keyword 10068 |Architect ; -greaterThanMultivalue +greaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -483,16 +489,20 @@ emp_no:integer |job_positions:keyword 10021 |Support Engineer ; -equalToMultivalue +equalToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword 10025 |Accountant ; -equalToOrEqualToMultivalue +equalToOrEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -500,8 +510,10 @@ emp_no:integer |job_positions:keyword 10025 |Accountant ; -inMultivalue +inMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; +warning:Line 1:24: evaluation of [job_positions in (\"Accountant\", \"Tech Lead\")] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -509,8 +521,10 @@ emp_no:integer |job_positions:keyword 10025 |Accountant ; -notLessThanMultivalue +notLessThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions < \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -522,8 +536,10 @@ emp_no:integer |job_positions:keyword 10021 |Support Engineer ; -notGreaterThanMultivalue +notGreaterThanMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions > \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded.] 
+warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -531,8 +547,10 @@ emp_no:integer |job_positions:keyword 10068 |Architect ; -notEqualToMultivalue +notEqualToMultivalue#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; +warning:Line 1:24: evaluation of [not(job_positions == \"Accountant\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded.] +warning:Line 1:24: java.lang.IllegalArgumentException: single-value function encountered multi-value#[Emulated:Line 1:28: java.lang.IllegalArgumentException: single-value function encountered multi-value] // Note that multivalued job_positions aren't included because they aren't less than or greater than C - that comparison is null emp_no:integer |job_positions:keyword @@ -662,9 +680,11 @@ beta | Kubernetes cluster | beta k8s server beta | Kubernetes cluster | [beta k8s server, beta k8s server2] ; -lengthOfText +lengthOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true +warning:Line 1:73: evaluation of [length(description)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:73: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:integer | l2:integer null | 19 @@ -672,9 +692,11 @@ null | 19 17 | null ; -startsWithText +startsWithText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true +warning:Line 1:84: evaluation of [starts_with(description, host)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:84: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:boolean | l2:boolean null | true @@ -682,9 +704,11 @@ false | null false | null ; -substringOfText +substringOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true +warning:Line 1:82: evaluation of [substring(description, 0, 5)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:82: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:keyword | l2:keyword null | epsil @@ -692,8 +716,10 @@ Gatew | null Gatew | null ; -concatOfText -from hosts | where host == "epsilon" | eval l1 = concat(host,"/", host_group), l2 = concat(host_group,"/", description) | sort l1 | keep l1, l2; +concatOfText#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] +from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; +warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:86: java.lang.IllegalArgumentException: single-value function encountered multi-value l1:keyword | l2:keyword epsilon/Gateway instances | null diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 2238b0c086d9e..523a0ef7c9eed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -45,8 +45,10 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; 2017-11-10T20:26:21.000Z|17067060651018256448|1722789377000665830 |67 |OK ; -filterPushDownGT +filterPushDownGT#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; +warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | div:ul |id:i 74330435873664882 |74 |82 @@ -63,8 +65,12 @@ from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval 2703254959364209157|2703 |18 ; -filterPushDownRange +filterPushDownRange#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; +warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:#[Emulated:Line 1:67: evaluation of [bytes_in <= to_ul(316080452389500167)] failed, treating result as null. Only first 20 failures recorded.] +warning:#[Emulated:Line 1:67: java.lang.IllegalArgumentException: single-value function encountered multi-value] bytes_in:ul | div:ul |id:i 74330435873664882 |74 |82 @@ -73,9 +79,11 @@ from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to 316080452389500167 |316 |25 ; -filterPushDownIn +filterPushDownIn#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; +warning:Line 1:22: evaluation of [bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241))] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul |id:i 74330435873664882 |82 @@ -83,15 +91,19 @@ from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(15455196215089 195161570976258241 |88 ; -filterOnFieldsEquality +filterOnFieldsEquality#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where bytes_in == bytes_out; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k 2017-11-10T21:12:17.000Z|16002960716282089759|16002960716282089759|34 |OK ; -filterOnFieldsInequality +filterOnFieldsInequality#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; +warning:Line 1:32: evaluation of [bytes_in < bytes_out] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:32: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k | b_in:ul | b_out:ul 2017-11-10T21:15:54.000Z|4348801185987554667 |12749081495402663265|1 |OK |4348 |12749 @@ -117,8 +129,10 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des 1 |18317075104972913640 ; -case +case#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); +warning:Line 1:27: evaluation of [bytes_in == to_ul(154551962150890564)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:27: java.lang.IllegalArgumentException: single-value function encountered multi-value @timestamp:date | bytes_in:ul | bytes_out:ul | id:i | status:k 2017-11-10T20:21:58.000Z|154551962150890564|9382204513185396493|63 |OK @@ -138,17 +152,21 @@ FROM ul_logs 2017-11-10T20:34:43.000Z | 17764691215469285192 | 1.75E19 ; -toDegrees +toDegrees#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | deg:double 16002960716282089759 | 9.169021087566165E20 ; -toRadians +toRadians#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; +warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:22: java.lang.IllegalArgumentException: single-value function encountered multi-value bytes_in:ul | rad:double 16002960716282089759 | 2.79304354566432608E17 diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java index b5b05d6d395fa..ef26fb4d000dc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsBoolsEvaluator get(DriverContext context) { - return new EqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsBoolsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java index b4a0f127c8fa1..d5b2e84384a03 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsDoublesEvaluator get(DriverContext context) { - return new EqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java index 8e491e14c6dc3..c2c9c7ce2b19c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsIntsEvaluator get(DriverContext context) { - return new EqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 0fe04c80a66f1..8dc15ba6d2fec 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, 
EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsKeywordsEvaluator get(DriverContext context) { - return new EqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java index 9e656111ee074..870d7c546010f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Equals}. * This class is generated. Do not edit it. 
*/ public final class EqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public EqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public EqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public EqualsLongsEvaluator get(DriverContext context) { - return new EqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new EqualsLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index 64ab3a28df39c..051df8053417f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, 
+ public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanDoublesEvaluator get(DriverContext context) { - return new GreaterThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java index 7795e9b5f1b4a..c6de582ef2909 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanIntsEvaluator get(DriverContext context) { - return new GreaterThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 21ae9b1464d2a..cf243b68e473c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanKeywordsEvaluator get(DriverContext context) { - return new GreaterThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java index b2b559c715126..5f1a679c76a31 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThan}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanLongsEvaluator get(DriverContext context) { - return new GreaterThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index b73c6e359afd2..c36031c321422 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualDoublesEvaluator get(DriverContext context) { - return new GreaterThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 2a77ee8f068e2..2b64cfcf9ea49 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory 
lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualIntsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index 6909a3b761dd3..b8b2c9b6d4459 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualKeywordsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 71a68b0bb95e6..907a29c8c904d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link GreaterThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class GreaterThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public GreaterThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public GreaterThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public GreaterThanOrEqualLongsEvaluator get(DriverContext context) { - return new GreaterThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new GreaterThanOrEqualLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java index f4990fe06f6cb..c3cf8293071e3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanDoublesEvaluator get(DriverContext context) { - return new LessThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java index db623747a5e61..a66ac0e889090 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanIntsEvaluator get(DriverContext context) { - return new LessThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java index be658c3da46ec..a0951d9a09382 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory 
lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanKeywordsEvaluator get(DriverContext context) { - return new LessThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java index 444c715c753cd..f0e7ac134410b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThan}. * This class is generated. Do not edit it. 
*/ public final class LessThanLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanLongsEvaluator get(DriverContext context) { - return new LessThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index bffdf4a80649c..cf12098962599 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualDoublesEvaluator get(DriverContext context) { - return new LessThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index dd47aab76f21c..ffa8ab38bc2eb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + 
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualIntsEvaluator get(DriverContext context) { - return new LessThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index e7a37b3f0fc41..2e7aafeb2d805 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualKeywordsEvaluator get(DriverContext context) { - return new LessThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index fec54d164ac3b..9c211610da814 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LessThanOrEqual}. * This class is generated. Do not edit it. 
*/ public final class LessThanOrEqualLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public LessThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public LessThanOrEqualLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory 
lhs, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public LessThanOrEqualLongsEvaluator get(DriverContext context) { - return new LessThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new LessThanOrEqualLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index a8a8882bf54a4..7d2067fe6bdbe 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsBoolsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsBoolsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock lhsBlock, BooleanBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsBoolsEvaluator get(DriverContext context) { - return new NotEqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsBoolsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index cf5d7a5717600..174d3df53853b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + 
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsDoublesEvaluator get(DriverContext context) { - return new NotEqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 128118d957222..03abc111d820e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsIntsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsIntsEvaluator get(DriverContext context) { - return new NotEqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsIntsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index c2d12fe5840ab..919aeb4099b1f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsKeywordsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsKeywordsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock lhsBlock, BytesRefBloc BytesRef lhsScratch = new BytesRef(); BytesRef rhsScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -93,19 +113,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsKeywordsEvaluator get(DriverContext context) { - return new NotEqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsKeywordsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 57e40c2857449..4ec694f918d97 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link NotEquals}. * This class is generated. Do not edit it. 
*/ public final class NotEqualsLongsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public NotEqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, + public NotEqualsLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public 
Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public NotEqualsLongsEvaluator get(DriverContext context) { - return new NotEqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + return new NotEqualsLongsEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index de3f57d54d8e4..822d380386ee9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Not}. * This class is generated. Do not edit it. 
*/ public final class NotEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public NotEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public NotEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, BooleanBlock vBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public NotEvaluator get(DriverContext context) { - return new NotEvaluator(v.get(context), context); + return new NotEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java index 
83860fc328543..bdb8bfd0f613a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -17,20 +18,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RegexMatch}. * This class is generated. Do not edit it. */ public final class RegexMatchEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator input; private final CharacterRunAutomaton pattern; private final DriverContext driverContext; - public RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, CharacterRunAutomaton pattern, - DriverContext driverContext) { + public RegexMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator input, + CharacterRunAutomaton pattern, DriverContext driverContext) { + this.warnings = new Warnings(source); this.input = input; this.pattern = pattern; this.driverContext = driverContext; @@ -51,7 +57,14 @@ public BooleanBlock eval(int positionCount, BytesRefBlock inputBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { BytesRef inputScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (inputBlock.isNull(p) || inputBlock.getValueCount(p) != 1) { + if 
(inputBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (inputBlock.getValueCount(p) != 1) { + if (inputBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -82,18 +95,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory input; private final CharacterRunAutomaton pattern; - public Factory(EvalOperator.ExpressionEvaluator.Factory input, CharacterRunAutomaton pattern) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory input, + CharacterRunAutomaton pattern) { + this.source = source; this.input = input; this.pattern = pattern; } @Override public RegexMatchEvaluator get(DriverContext context) { - return new RegexMatchEvaluator(input.get(context), pattern, context); + return new RegexMatchEvaluator(source, input.get(context), pattern, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index e335a2cc50add..75558171ab58c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. */ public final class GreatestBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] 
values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestBooleanEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestBooleanEvaluator(values, context); + return new GreatestBooleanEvaluator(source, values, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index 0919b6c624572..e70d147ec19b0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -16,18 +17,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -59,7 +65,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -104,16 +117,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestBytesRefEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestBytesRefEvaluator(values, context); + return new GreatestBytesRefEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index acabb839e0543..4a5d49cb5853b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestDoubleEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestDoubleEvaluator(values, context); + return new GreatestDoubleEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index e2fc35c829b5f..6c675c3168523 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestIntEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestIntEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestIntEvaluator(values, context); + return new GreatestIntEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index 8f10c02c53c00..3f4f0c748db3f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Greatest}. * This class is generated. Do not edit it. 
*/ public final class GreatestLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public GreatestLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + public GreatestLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public GreatestLongEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new GreatestLongEvaluator(values, context); + return new GreatestLongEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java index ce337ae405cba..70d4345fe197c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastBooleanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastBooleanEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastBooleanEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public BooleanBlock eval(int positionCount, BooleanBlock[] valuesBlocks) { boolean[] valuesValues = new boolean[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastBooleanEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastBooleanEvaluator(values, context); + return new LeastBooleanEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index 621d21e13f691..642ca36574cb6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -16,18 +17,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastBytesRefEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastBytesRefEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastBytesRefEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -59,7 +65,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -104,16 +117,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastBytesRefEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastBytesRefEvaluator(values, context); + return new LeastBytesRefEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index 42255e56c6527..41b0ad4d4d085 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastDoubleEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public DoubleBlock eval(int positionCount, DoubleBlock[] valuesBlocks) { double[] valuesValues = new double[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastDoubleEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastDoubleEvaluator(values, context); + return new LeastDoubleEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index ca95f0096166e..c2c80db6ca0bb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastIntEvaluator(EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + public LeastIntEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -53,7 +60,14 @@ public IntBlock eval(int positionCount, IntBlock[] valuesBlocks) { int[] valuesValues = new int[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastIntEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastIntEvaluator(values, context); + return new LeastIntEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 263972b414dd4..cd8ab3a0cd06f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.conditional; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Least}. * This class is generated. Do not edit it. 
*/ public final class LeastLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public LeastLongEvaluator(EvalOperator.ExpressionEvaluator[] values, + public LeastLongEvaluator(Source source, EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.values = values; this.driverContext = driverContext; } @@ -54,7 +60,14 @@ public LongBlock eval(int positionCount, LongBlock[] valuesBlocks) { long[] valuesValues = new long[values.length]; position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -95,16 +108,19 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.values = values; } @Override public LeastLongEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new LeastLongEvaluator(values, context); + return new LeastLongEvaluator(source, values, context); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java index f4109947c7406..8b1804cacfc21 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.ZoneId; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateExtract}. * This class is generated. Do not edit it. 
*/ public final class DateExtractConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator value; private final ChronoField chronoField; @@ -29,8 +34,9 @@ public final class DateExtractConstantEvaluator implements EvalOperator.Expressi private final DriverContext driverContext; - public DateExtractConstantEvaluator(EvalOperator.ExpressionEvaluator value, + public DateExtractConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, ChronoField chronoField, ZoneId zone, DriverContext driverContext) { + this.warnings = new Warnings(source); this.value = value; this.chronoField = chronoField; this.zone = zone; @@ -51,7 +57,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valueBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p) || valueBlock.getValueCount(p) != 1) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -81,14 +94,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory value; private final ChronoField chronoField; private final ZoneId zone; - public Factory(EvalOperator.ExpressionEvaluator.Factory value, ChronoField chronoField, - ZoneId zone) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, + ChronoField chronoField, ZoneId zone) { + this.source = source; this.value = value; this.chronoField = chronoField; this.zone = zone; @@ -96,7 +112,7 @@ public 
Factory(EvalOperator.ExpressionEvaluator.Factory value, ChronoField chron @Override public DateExtractConstantEvaluator get(DriverContext context) { - return new DateExtractConstantEvaluator(value.get(context), chronoField, zone, context); + return new DateExtractConstantEvaluator(source, value.get(context), chronoField, zone, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java index 37af410e1d49d..65af16e2a9f5b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java @@ -66,11 +66,25 @@ public LongBlock eval(int positionCount, LongBlock valueBlock, BytesRefBlock chr try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef chronoFieldScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valueBlock.isNull(p) || valueBlock.getValueCount(p) != 1) { + if (valueBlock.isNull(p)) { result.appendNull(); continue position; } - if (chronoFieldBlock.isNull(p) || chronoFieldBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (chronoFieldBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (chronoFieldBlock.getValueCount(p) != 1) { + if (chronoFieldBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } 
result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java index 1ef4b15860dde..38cc3e2809f0a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.common.time.DateFormatter; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. 
*/ public final class DateFormatConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DateFormatter formatter; private final DriverContext driverContext; - public DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, DateFormatter formatter, - DriverContext driverContext) { + public DateFormatConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DateFormatter formatter, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.driverContext = driverContext; @@ -49,7 +55,14 @@ public Block eval(Page page) { public BytesRefBlock eval(int positionCount, LongBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -79,18 +92,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final DateFormatter formatter; - public Factory(EvalOperator.ExpressionEvaluator.Factory val, DateFormatter formatter) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, + DateFormatter formatter) { + this.source = source; this.val = val; this.formatter = formatter; } @Override public DateFormatConstantEvaluator get(DriverContext context) { - return new DateFormatConstantEvaluator(val.get(context), formatter, context); + return new 
DateFormatConstantEvaluator(source, val.get(context), formatter, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java index 5f8077f908b39..d517c16cb4076 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Locale; @@ -17,12 +18,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateFormat}. * This class is generated. Do not edit it. 
*/ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator formatter; @@ -31,8 +36,9 @@ public final class DateFormatEvaluator implements EvalOperator.ExpressionEvaluat private final DriverContext driverContext; - public DateFormatEvaluator(EvalOperator.ExpressionEvaluator val, + public DateFormatEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator formatter, Locale locale, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.formatter = formatter; this.locale = locale; @@ -60,11 +66,25 @@ public BytesRefBlock eval(int positionCount, LongBlock valBlock, BytesRefBlock f try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (formatterBlock.isNull(p) || formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -96,14 +116,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory formatter; private final Locale locale; - public Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory formatter, Locale locale) { + this.source = source; this.val = val; this.formatter = formatter; this.locale = locale; @@ -111,7 +134,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory val, @Override public DateFormatEvaluator get(DriverContext context) { - return new DateFormatEvaluator(val.get(context), formatter.get(context), locale, context); + return new DateFormatEvaluator(source, val.get(context), formatter.get(context), locale, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java index 84e141dcdf448..3a6b44d82a011 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java @@ -56,7 +56,14 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered 
multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java index 233d2f45c93fa..2da9310b0f53a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java @@ -66,11 +66,25 @@ public LongBlock eval(int positionCount, BytesRefBlock valBlock, BytesRefBlock f BytesRef valScratch = new BytesRef(); BytesRef formatterScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { result.appendNull(); continue position; } - if (formatterBlock.isNull(p) || formatterBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (formatterBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (formatterBlock.getValueCount(p) != 1) { + if (formatterBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java index ff31d753427d4..27a15ca19bec9 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.date; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.common.Rounding; @@ -14,20 +15,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link DateTrunc}. * This class is generated. Do not edit it. */ public final class DateTruncEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final Rounding.Prepared rounding; private final DriverContext driverContext; - public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Prepared rounding, - DriverContext driverContext) { + public DateTruncEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + Rounding.Prepared rounding, DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.rounding = rounding; this.driverContext = driverContext; @@ -47,7 +53,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue 
position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -77,18 +90,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; private final Rounding.Prepared rounding; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal, Rounding.Prepared rounding) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal, + Rounding.Prepared rounding) { + this.source = source; this.fieldVal = fieldVal; this.rounding = rounding; } @Override public DateTruncEvaluator get(DriverContext context) { - return new DateTruncEvaluator(fieldVal.get(context), rounding, context); + return new DateTruncEvaluator(source, fieldVal.get(context), rounding, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java index d4c04b724377e..45465468f7c91 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java @@ -11,17 +11,22 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Now}. * This class is generated. 
Do not edit it. */ public final class NowEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final long now; private final DriverContext driverContext; - public NowEvaluator(long now, DriverContext driverContext) { + public NowEvaluator(Source source, long now, DriverContext driverContext) { + this.warnings = new Warnings(source); this.now = now; this.driverContext = driverContext; } @@ -50,15 +55,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final long now; - public Factory(long now) { + public Factory(Source source, long now) { + this.source = source; this.now = now; } @Override public NowEvaluator get(DriverContext context) { - return new NowEvaluator(now, context); + return new NowEvaluator(source, now, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java index c3a347433ff9f..4ac2fa7d2738e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.ip; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -18,20 +19,25 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link CIDRMatch}. * This class is generated. Do not edit it. */ public final class CIDRMatchEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator ip; private final EvalOperator.ExpressionEvaluator[] cidrs; private final DriverContext driverContext; - public CIDRMatchEvaluator(EvalOperator.ExpressionEvaluator ip, + public CIDRMatchEvaluator(Source source, EvalOperator.ExpressionEvaluator ip, EvalOperator.ExpressionEvaluator[] cidrs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.ip = ip; this.cidrs = cidrs; this.driverContext = driverContext; @@ -70,12 +76,26 @@ public BooleanBlock eval(int positionCount, BytesRefBlock ipBlock, BytesRefBlock cidrsScratch[i] = new BytesRef(); } position: for (int p = 0; p < positionCount; p++) { - if (ipBlock.isNull(p) || ipBlock.getValueCount(p) != 1) { + if (ipBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (ipBlock.getValueCount(p) != 1) { + if (ipBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } for (int i = 0; i < cidrsBlocks.length; i++) { - if (cidrsBlocks[i].isNull(p) || cidrsBlocks[i].getValueCount(p) != 1) { + if (cidrsBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (cidrsBlocks[i].getValueCount(p) != 1) { 
+ if (cidrsBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -122,12 +142,15 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory ip; private final EvalOperator.ExpressionEvaluator.Factory[] cidrs; - public Factory(EvalOperator.ExpressionEvaluator.Factory ip, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory ip, EvalOperator.ExpressionEvaluator.Factory[] cidrs) { + this.source = source; this.ip = ip; this.cidrs = cidrs; } @@ -135,7 +158,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory ip, @Override public CIDRMatchEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] cidrs = Arrays.stream(this.cidrs).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); - return new CIDRMatchEvaluator(ip.get(context), cidrs, context); + return new CIDRMatchEvaluator(source, ip.get(context), cidrs, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java index b7e061e5e684b..d7c793b99e57b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. */ public final class AbsDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal, + public AbsDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock fieldValBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsDoubleEvaluator get(DriverContext context) { - return new AbsDoubleEvaluator(fieldVal.get(context), context); + return new AbsDoubleEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java index 9894a8ebcdce3..9964a95fafe0c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. 
*/ public final class AbsIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + public AbsIntEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock fieldValBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsIntEvaluator get(DriverContext context) { - return new AbsIntEvaluator(fieldVal.get(context), context); + return new AbsIntEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java index ebbb754e28188..9457112aa9d81 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Abs}. * This class is generated. Do not edit it. 
*/ public final class AbsLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator fieldVal; private final DriverContext driverContext; - public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext driverContext) { + public AbsLongEvaluator(Source source, EvalOperator.ExpressionEvaluator fieldVal, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.fieldVal = fieldVal; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock fieldValBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (fieldValBlock.isNull(p) || fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (fieldValBlock.getValueCount(p) != 1) { + if (fieldValBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory fieldVal; - public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory fieldVal) { + this.source = source; this.fieldVal = fieldVal; } @Override public AbsLongEvaluator get(DriverContext context) { - return new AbsLongEvaluator(fieldVal.get(context), context); + return new AbsLongEvaluator(source, fieldVal.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java index ce43cb0d88d09..1c86fe46e9b93 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java index 2b8168cd2abc7..fc73f4c475676 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import 
java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java index ac4d61502be33..b6d0a628c329c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan2}. 
* This class is generated. Do not edit it. */ public final class Atan2Evaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator y; private final EvalOperator.ExpressionEvaluator x; private final DriverContext driverContext; - public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.ExpressionEvaluator x, - DriverContext driverContext) { + public Atan2Evaluator(Source source, EvalOperator.ExpressionEvaluator y, + EvalOperator.ExpressionEvaluator x, DriverContext driverContext) { + this.warnings = new Warnings(source); this.y = y; this.x = x; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock yBlock, DoubleBlock xBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (yBlock.isNull(p) || yBlock.getValueCount(p) != 1) { + if (yBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (yBlock.getValueCount(p) != 1) { + if (yBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (xBlock.isNull(p) || xBlock.getValueCount(p) != 1) { + if (xBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (xBlock.getValueCount(p) != 1) { + if (xBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory y; private final EvalOperator.ExpressionEvaluator.Factory x; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory y, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory y, EvalOperator.ExpressionEvaluator.Factory x) { + this.source = source; this.y = y; this.x = x; } @Override public Atan2Evaluator get(DriverContext context) { - return new Atan2Evaluator(y.get(context), x.get(context), context); + return new Atan2Evaluator(source, y.get(context), x.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java index 2ce4dac48fbf5..b40a6cde6550e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Atan}. * This class is generated. Do not edit it. 
*/ public final class AtanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public AtanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public AtanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public AtanEvaluator get(DriverContext context) { - return new AtanEvaluator(val.get(context), context); + return new AtanEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java index 5b09822354480..a13d11199c0fb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public CastIntToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToDoubleEvaluator get(DriverContext context) { - return new CastIntToDoubleEvaluator(v.get(context), context); + return new CastIntToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java index 0b9f3a5cd2a51..cf91f080537e7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public CastIntToLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, IntBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToLongEvaluator get(DriverContext context) { - return new CastIntToLongEvaluator(v.get(context), context); + return new CastIntToLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java index ee228b79085b7..15b18a91ee241 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastIntToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + public CastIntToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, IntBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastIntToUnsignedLongEvaluator get(DriverContext context) { - return new CastIntToUnsignedLongEvaluator(v.get(context), context); + return new CastIntToUnsignedLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java index 9a70690bf891d..1bb63cb66eec5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + public CastLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastLongToDoubleEvaluator get(DriverContext context) { - return new CastLongToDoubleEvaluator(v.get(context), context); + return new CastLongToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java index a258b2eeb7636..3ed067671183d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastLongToUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v, + public CastLongToUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastLongToUnsignedLongEvaluator get(DriverContext context) { - return new CastLongToUnsignedLongEvaluator(v.get(context), context); + return new CastLongToUnsignedLongEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java index f57d0f4dae34d..5135aab0dcc50 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cast}. * This class is generated. Do not edit it. 
*/ public final class CastUnsignedLongToDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, + public CastUnsignedLongToDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public CastUnsignedLongToDoubleEvaluator get(DriverContext context) { - return new CastUnsignedLongToDoubleEvaluator(v.get(context), context); + return new CastUnsignedLongToDoubleEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index fb25d318f7336..500f108afbe39 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Ceil}. * This class is generated. Do not edit it. 
*/ public final class CeilDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public CeilDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public CeilDoubleEvaluator get(DriverContext context) { - return new CeilDoubleEvaluator(val.get(context), context); + return new CeilDoubleEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index 7fb5063875834..dd3961845c244 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Cos}. * This class is generated. Do not edit it. 
*/ public final class CosEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public CosEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public CosEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public CosEvaluator get(DriverContext context) { - return new CosEvaluator(val.get(context), context); + return new CosEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java 
index ab862a62c6bfe..2f0bbaaacb40a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index 99ceca3521883..f8a10822a3c44 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Floor}. * This class is generated. Do not edit it. */ public final class FloorDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public FloorDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final 
EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public FloorDoubleEvaluator get(DriverContext context) { - return new FloorDoubleEvaluator(val.get(context), context); + return new FloorDoubleEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java index 6ad3ccb6cb287..85a5476b9510d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsFinite}. * This class is generated. Do not edit it. 
*/ public final class IsFiniteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsFiniteEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsFiniteEvaluator get(DriverContext context) { - return new IsFiniteEvaluator(val.get(context), context); + return new IsFiniteEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java index 00b260467046c..1813fd1ee056e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsInfinite}. * This class is generated. Do not edit it. 
*/ public final class IsInfiniteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsInfiniteEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsInfiniteEvaluator get(DriverContext context) { - return new IsInfiniteEvaluator(val.get(context), context); + return new IsInfiniteEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java index d7639010d9533..b9dee40de583d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,17 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link IsNaN}. * This class is generated. Do not edit it. 
*/ public final class IsNaNEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public IsNaNEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public IsNaNEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public Block eval(Page page) { public BooleanBlock eval(int positionCount, DoubleBlock valBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -74,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public IsNaNEvaluator get(DriverContext context) { - return new IsNaNEvaluator(val.get(context), context); + return new IsNaNEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 6a42dadae78ea..d402cf7a79e68 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index 782e35e9a74ab..a1aa03af7d7f5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -5,6 +5,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index cfcf56a637f32..848baaea72b67 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - 
if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 1b092bcbfd8a6..01812d8b1d2c9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java index 775cee816be7b..33bf2b4bd0c25 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (baseBlock.isNull(p) || baseBlock.getValueCount(p) != 1) { + if (baseBlock.isNull(p)) { result.appendNull(); continue position; } - if (exponentBlock.isNull(p) || exponentBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if (exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java new file mode 100644 index 0000000000000..1232e0dda7c0f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java @@ -0,0 +1,146 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. + * This class is generated. Do not edit it. 
+ */ +public final class PowIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator base; + + private final EvalOperator.ExpressionEvaluator exponent; + + private final DriverContext driverContext; + + public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.base = base; + this.exponent = exponent; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } + } + } + + public IntBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (baseBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if 
(exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendInt(Pow.processInt(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public IntBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { + try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendInt(Pow.processInt(baseVector.getDouble(p), exponentVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(base, exponent); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowIntEvaluator get(DriverContext context) { + return new PowIntEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java new file mode 100644 index 0000000000000..bd2e5f5e10ec2 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java @@ -0,0 +1,146 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Pow}. + * This class is generated. Do not edit it. 
+ */ +public final class PowLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator base; + + private final EvalOperator.ExpressionEvaluator exponent; + + private final DriverContext driverContext; + + public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, + EvalOperator.ExpressionEvaluator exponent, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.base = base; + this.exponent = exponent; + this.driverContext = driverContext; + } + + @Override + public Block.Ref eval(Page page) { + try (Block.Ref baseRef = base.eval(page)) { + DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); + try (Block.Ref exponentRef = exponent.eval(page)) { + DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + DoubleVector exponentVector = exponentBlock.asVector(); + if (exponentVector == null) { + return Block.Ref.floating(eval(page.getPositionCount(), baseBlock, exponentBlock)); + } + return Block.Ref.floating(eval(page.getPositionCount(), baseVector, exponentVector)); + } + } + } + + public LongBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock exponentBlock) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (baseBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (exponentBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (exponentBlock.getValueCount(p) != 1) { + if 
(exponentBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendLong(Pow.processLong(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), exponentBlock.getDouble(exponentBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public LongBlock eval(int positionCount, DoubleVector baseVector, DoubleVector exponentVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendLong(Pow.processLong(baseVector.getDouble(p), exponentVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(base, exponent); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowLongEvaluator get(DriverContext context) { + return new PowLongEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index a658e73a3b44f..3b85a32fc3081 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundDoubleEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundDoubleEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock, LongBlock decimalsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundDoubleEvaluator get(DriverContext context) { - return new RoundDoubleEvaluator(val.get(context), decimals.get(context), context); + return new RoundDoubleEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index 316655de1d7b7..c36a1fe25b61f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,18 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundDoubleNoDecimalsEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundDoubleNoDecimalsEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -43,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -73,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public RoundDoubleNoDecimalsEvaluator get(DriverContext context) { - return new RoundDoubleNoDecimalsEvaluator(val.get(context), context); + return new RoundDoubleNoDecimalsEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index 71ea5938afe48..f96f92e5d0b38 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,20 +16,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundIntEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundIntEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -54,11 +60,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock valBlock, LongBlock decimalsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -88,19 +108,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundIntEvaluator get(DriverContext context) { - return new RoundIntEvaluator(val.get(context), decimals.get(context), context); + return new RoundIntEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index eae45800fdee0..c8a2fdd384f40 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundLongEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundLongEvaluator get(DriverContext context) { - return new RoundLongEvaluator(val.get(context), decimals.get(context), context); + return new RoundLongEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index 5f8cb5370b213..5c94e386d4978 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Round}. * This class is generated. Do not edit it. 
*/ public final class RoundUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final EvalOperator.ExpressionEvaluator decimals; private final DriverContext driverContext; - public RoundUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, + public RoundUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, EvalOperator.ExpressionEvaluator decimals, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.decimals = decimals; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (decimalsBlock.isNull(p) || decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (decimalsBlock.getValueCount(p) != 1) { + if (decimalsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; private final EvalOperator.ExpressionEvaluator.Factory decimals; - public 
Factory(EvalOperator.ExpressionEvaluator.Factory val, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, EvalOperator.ExpressionEvaluator.Factory decimals) { + this.source = source; this.val = val; this.decimals = decimals; } @Override public RoundUnsignedLongEvaluator get(DriverContext context) { - return new RoundUnsignedLongEvaluator(val.get(context), decimals.get(context), context); + return new RoundUnsignedLongEvaluator(source, val.get(context), decimals.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index fd2f0b1e3de64..a3c9e1481c19e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sin}. * This class is generated. Do not edit it. 
*/ public final class SinEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public SinEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public SinEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public SinEvaluator get(DriverContext context) { - return new SinEvaluator(val.get(context), context); + return new SinEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java 
index 342c1b86a873f..c6020d6bd86ea 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 7be90cb5c87c0..516d6639fb115 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import 
java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index d7a24ebafec97..3719bc6bd7326 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, IntBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if 
(valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index 57055641877c9..a9620291ddd8b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -50,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index 8eddd0293ae86..6478f0639bb9a 100644 
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -15,18 +16,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sqrt}. * This class is generated. Do not edit it. */ public final class SqrtUnsignedLongEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, + public SqrtUnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -45,7 +51,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, LongBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new 
IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +88,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public SqrtUnsignedLongEvaluator get(DriverContext context) { - return new SqrtUnsignedLongEvaluator(val.get(context), context); + return new SqrtUnsignedLongEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 2ff4ccba94ae0..ed410d20d122b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tan}. * This class is generated. Do not edit it. 
*/ public final class TanEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TanEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TanEvaluator get(DriverContext context) { - return new TanEvaluator(val.get(context), context); + return new TanEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java 
index 05cfc6446cdb6..94fa4fad18fd3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Tanh}. * This class is generated. Do not edit it. */ public final class TanhEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TanhEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TanhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock valBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new 
IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TanhEvaluator get(DriverContext context) { - return new TanhEvaluator(val.get(context), context); + return new TanhEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 2b3045d29c70f..99e87ce490eb1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.Arrays; @@ -18,20 +19,25 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Concat}. * This class is generated. Do not edit it. 
*/ public final class ConcatEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BreakingBytesRefBuilder scratch; private final EvalOperator.ExpressionEvaluator[] values; private final DriverContext driverContext; - public ConcatEvaluator(BreakingBytesRefBuilder scratch, EvalOperator.ExpressionEvaluator[] values, - DriverContext driverContext) { + public ConcatEvaluator(Source source, BreakingBytesRefBuilder scratch, + EvalOperator.ExpressionEvaluator[] values, DriverContext driverContext) { + this.warnings = new Warnings(source); this.scratch = scratch; this.values = values; this.driverContext = driverContext; @@ -64,7 +70,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock[] valuesBlocks) { } position: for (int p = 0; p < positionCount; p++) { for (int i = 0; i < valuesBlocks.length; i++) { - if (valuesBlocks[i].isNull(p) || valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].isNull(p)) { + result.appendNull(); + continue position; + } + if (valuesBlocks[i].getValueCount(p) != 1) { + if (valuesBlocks[i].getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -109,12 +122,15 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function scratch; private final EvalOperator.ExpressionEvaluator.Factory[] values; - public Factory(Function scratch, + public Factory(Source source, Function scratch, EvalOperator.ExpressionEvaluator.Factory[] values) { + this.source = source; this.scratch = scratch; this.values = values; } @@ -122,7 +138,7 @@ public Factory(Function scratch, @Override public ConcatEvaluator get(DriverContext context) { EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); 
- return new ConcatEvaluator(scratch.apply(context), values, context); + return new ConcatEvaluator(source, scratch.apply(context), values, context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index b1cadf96b80cd..8d1d197aae9ad 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link EndsWith}. * This class is generated. Do not edit it. 
*/ public final class EndsWithEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator suffix; private final DriverContext driverContext; - public EndsWithEvaluator(EvalOperator.ExpressionEvaluator str, + public EndsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator suffix, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.suffix = suffix; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc BytesRef strScratch = new BytesRef(); BytesRef suffixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (suffixBlock.isNull(p) || suffixBlock.getValueCount(p) != 1) { + if (suffixBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (suffixBlock.getValueCount(p) != 1) { + if (suffixBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,19 +114,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory suffix; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory suffix) { + this.source = source; this.str = str; this.suffix = suffix; } @Override public EndsWithEvaluator get(DriverContext context) { - return new EndsWithEvaluator(str.get(context), suffix.get(context), context); + return new EndsWithEvaluator(source, str.get(context), suffix.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 034cf5ddc5727..0f68955507d50 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LTrim}. * This class is generated. Do not edit it. 
*/ public final class LTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public LTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public LTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public LTrimEvaluator get(DriverContext context) { - return new LTrimEvaluator(val.get(context), context); + return new LTrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index b2cbbc8ed9cf6..13e7cbe9ece92 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -18,12 +19,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Left}. * This class is generated. Do not edit it. 
*/ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BytesRef out; private final UnicodeUtil.UTF8CodePoint cp; @@ -34,9 +39,10 @@ public final class LeftEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; - public LeftEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, + public LeftEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; @@ -65,11 +71,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -100,6 +120,8 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function out; private final Function cp; @@ -108,10 +130,11 @@ static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(Function out, + public Factory(Source source, Function out, Function cp, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.out = out; this.cp = cp; this.str = str; @@ -120,7 +143,7 @@ public Factory(Function out, @Override public LeftEvaluator get(DriverContext context) { - return new LeftEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + return new LeftEvaluator(source, out.apply(context), cp.apply(context), str.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 2896de06f656d..890b56e78ca13 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,17 +17,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Length}. * This class is generated. Do not edit it. 
*/ public final class LengthEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public LengthEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public LengthEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -46,7 +53,14 @@ public IntBlock eval(int positionCount, BytesRefBlock valBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -77,15 +91,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public LengthEvaluator get(DriverContext context) { - return new LengthEvaluator(val.get(context), context); + return new LengthEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index a2d1d6bb34384..fdd1c2a23357f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link RTrim}. * This class is generated. Do not edit it. 
*/ public final class RTrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public RTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public RTrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public RTrimEvaluator get(DriverContext context) { - return new RTrimEvaluator(val.get(context), context); + return new RTrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index b3af24d2f6851..71f8724d17a80 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.regex.Pattern; @@ -65,11 +66,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef strScratch = new BytesRef(); BytesRef newStrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { result.appendNull(); continue position; } - if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index 89013fd3ca2f1..8d4deb878f117 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.regex.PatternSyntaxException; @@ -73,15 +74,36 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef regexScratch = new BytesRef(); BytesRef newStrScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { result.appendNull(); continue position; } - if (regexBlock.isNull(p) || regexBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (regexBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (regexBlock.getValueCount(p) != 1) { + if (regexBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (newStrBlock.isNull(p)) { result.appendNull(); continue position; } - if (newStrBlock.isNull(p) || newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) != 1) { + if (newStrBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index 1e3094ed8d5d3..96473a2deefd2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -18,12 +19,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Right}. * This class is generated. Do not edit it. 
*/ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final BytesRef out; private final UnicodeUtil.UTF8CodePoint cp; @@ -34,9 +39,10 @@ public final class RightEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; - public RightEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, + public RightEvaluator(Source source, BytesRef out, UnicodeUtil.UTF8CodePoint cp, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.out = out; this.cp = cp; this.str = str; @@ -65,11 +71,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock le try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -100,6 +120,8 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final Function out; private final Function cp; @@ -108,10 +130,11 @@ static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(Function out, + public Factory(Source source, Function out, Function cp, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.out = out; this.cp = cp; this.str = str; @@ -120,7 +143,7 @@ public Factory(Function out, @Override public RightEvaluator get(DriverContext context) { - return new RightEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + return new RightEvaluator(source, out.apply(context), cp.apply(context), str.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index bb5b3569934c0..7081f22606112 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. * This class is generated. Do not edit it. 
*/ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final byte delim; @@ -29,8 +34,9 @@ public final class SplitSingleByteEvaluator implements EvalOperator.ExpressionEv private final DriverContext driverContext; - public SplitSingleByteEvaluator(EvalOperator.ExpressionEvaluator str, byte delim, + public SplitSingleByteEvaluator(Source source, EvalOperator.ExpressionEvaluator str, byte delim, BytesRef scratch, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; @@ -52,7 +58,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -83,14 +96,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final byte delim; private final Function scratch; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, byte delim, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, byte delim, Function scratch) { + this.source = source; this.str = str; this.delim = delim; this.scratch = scratch; @@ -98,7 +114,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, byte delim, @Override public 
SplitSingleByteEvaluator get(DriverContext context) { - return new SplitSingleByteEvaluator(str.get(context), delim, scratch.apply(context), context); + return new SplitSingleByteEvaluator(source, str.get(context), delim, scratch.apply(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index d80d8d65c3606..82feca1b79053 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.util.function.Function; @@ -15,12 +16,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Split}. * This class is generated. Do not edit it. 
*/ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator delim; @@ -29,8 +34,9 @@ public final class SplitVariableEvaluator implements EvalOperator.ExpressionEval private final DriverContext driverContext; - public SplitVariableEvaluator(EvalOperator.ExpressionEvaluator str, + public SplitVariableEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator delim, BytesRef scratch, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.delim = delim; this.scratch = scratch; @@ -59,11 +65,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBlo BytesRef strScratch = new BytesRef(); BytesRef delimScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (delimBlock.isNull(p) || delimBlock.getValueCount(p) != 1) { + if (delimBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (delimBlock.getValueCount(p) != 1) { + if (delimBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -96,14 +116,17 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory delim; private final 
Function scratch; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory delim, Function scratch) { + this.source = source; this.str = str; this.delim = delim; this.scratch = scratch; @@ -111,7 +134,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, @Override public SplitVariableEvaluator get(DriverContext context) { - return new SplitVariableEvaluator(str.get(context), delim.get(context), scratch.apply(context), context); + return new SplitVariableEvaluator(source, str.get(context), delim.get(context), scratch.apply(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 564dd1b7760be..9eb1c488f52dd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link StartsWith}. * This class is generated. Do not edit it. 
*/ public final class StartsWithEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator prefix; private final DriverContext driverContext; - public StartsWithEvaluator(EvalOperator.ExpressionEvaluator str, + public StartsWithEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator prefix, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.prefix = prefix; this.driverContext = driverContext; @@ -57,11 +63,25 @@ public BooleanBlock eval(int positionCount, BytesRefBlock strBlock, BytesRefBloc BytesRef strScratch = new BytesRef(); BytesRef prefixScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (prefixBlock.isNull(p) || prefixBlock.getValueCount(p) != 1) { + if (prefixBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (prefixBlock.getValueCount(p) != 1) { + if (prefixBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -94,19 +114,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory prefix; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory prefix) { + this.source = source; this.str = str; this.prefix = prefix; } @Override public StartsWithEvaluator get(DriverContext context) { - return new StartsWithEvaluator(str.get(context), prefix.get(context), context); + return new StartsWithEvaluator(source, str.get(context), prefix.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index f0b4b0363ebc5..9da104137ba94 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,12 +17,16 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. * This class is generated. Do not edit it. 
*/ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator start; @@ -30,9 +35,10 @@ public final class SubstringEvaluator implements EvalOperator.ExpressionEvaluato private final DriverContext driverContext; - public SubstringEvaluator(EvalOperator.ExpressionEvaluator str, + public SubstringEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, EvalOperator.ExpressionEvaluator length, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.start = start; this.length = length; @@ -67,15 +73,36 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock st try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (startBlock.isNull(p)) { result.appendNull(); continue position; } - if (startBlock.isNull(p) || startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (lengthBlock.isNull(p) || lengthBlock.getValueCount(p) != 1) { + if (lengthBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lengthBlock.getValueCount(p) != 1) { + if 
(lengthBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -107,15 +134,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory start; private final EvalOperator.ExpressionEvaluator.Factory length; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory start, EvalOperator.ExpressionEvaluator.Factory length) { + this.source = source; this.str = str; this.start = start; this.length = length; @@ -123,7 +153,7 @@ public Factory(EvalOperator.ExpressionEvaluator.Factory str, @Override public SubstringEvaluator get(DriverContext context) { - return new SubstringEvaluator(str.get(context), start.get(context), length.get(context), context); + return new SubstringEvaluator(source, str.get(context), start.get(context), length.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index a410df8bbdc69..08d12ac049837 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -16,20 +17,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Substring}. * This class is generated. Do not edit it. */ public final class SubstringNoLengthEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator str; private final EvalOperator.ExpressionEvaluator start; private final DriverContext driverContext; - public SubstringNoLengthEvaluator(EvalOperator.ExpressionEvaluator str, + public SubstringNoLengthEvaluator(Source source, EvalOperator.ExpressionEvaluator str, EvalOperator.ExpressionEvaluator start, DriverContext driverContext) { + this.warnings = new Warnings(source); this.str = str; this.start = start; this.driverContext = driverContext; @@ -56,11 +62,25 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock strBlock, IntBlock st try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef strScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (strBlock.isNull(p) || strBlock.getValueCount(p) != 1) { + if (strBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (strBlock.getValueCount(p) != 1) { + if (strBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (startBlock.isNull(p) || startBlock.getValueCount(p) != 1) { + if 
(startBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (startBlock.getValueCount(p) != 1) { + if (startBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -91,19 +111,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory str; private final EvalOperator.ExpressionEvaluator.Factory start; - public Factory(EvalOperator.ExpressionEvaluator.Factory str, + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, EvalOperator.ExpressionEvaluator.Factory start) { + this.source = source; this.str = str; this.start = start; } @Override public SubstringNoLengthEvaluator get(DriverContext context) { - return new SubstringNoLengthEvaluator(str.get(context), start.get(context), context); + return new SubstringNoLengthEvaluator(source, str.get(context), start.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 38b42070e96a6..1ecb6b3bd578f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.string; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.apache.lucene.util.BytesRef; @@ -14,17 +15,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Trim}. * This class is generated. Do not edit it. */ public final class TrimEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator val; private final DriverContext driverContext; - public TrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverContext) { + public TrimEvaluator(Source source, EvalOperator.ExpressionEvaluator val, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.val = val; this.driverContext = driverContext; } @@ -44,7 +51,14 @@ public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) { try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { BytesRef valScratch = new BytesRef(); position: for (int p = 0; p < positionCount; p++) { - if (valBlock.isNull(p) || valBlock.getValueCount(p) != 1) { + if (valBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valBlock.getValueCount(p) != 1) { + if (valBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -75,15 +89,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory 
val; - public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; this.val = val; } @Override public TrimEvaluator get(DriverContext context) { - return new TrimEvaluator(val.get(context), context); + return new TrimEvaluator(source, val.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index f484a77c30ed2..04b433ecde34a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.DateTimeException; @@ -54,7 +55,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock datetimeBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (datetimeBlock.isNull(p) || datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index 1e9cf33ae39e2..071369c29f333 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Add}. * This class is generated. Do not edit it. 
*/ public final class AddDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public AddDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public AddDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public AddDoublesEvaluator get(DriverContext context) { - return new AddDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new AddDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index e7a3b57479b99..bf9157540ea55 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index d2e029ff276b8..51199df88fb9b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index 54b7b8df88178..10b21fb5898e8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index f906d83b19ce4..bb9f55f2b5b85 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Div}. * This class is generated. Do not edit it. 
*/ public final class DivDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public DivDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public DivDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public DivDoublesEvaluator get(DriverContext context) { - return new DivDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new DivDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index 53cfbd8540e33..de3fb03fe4405 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 31f62d3d729c5..9eb02cbd47614 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index 104208de1e13f..50e3c933fec41 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index 6d4f2d08b0b6e..8d441ffe10a48 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mod}. * This class is generated. Do not edit it. 
*/ public final class ModDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public ModDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public ModDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public ModDoublesEvaluator get(DriverContext context) { - return new ModDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new ModDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 1f6979179627d..c2c44dba5207d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index 3bc252c5cd059..58b3f055db6b2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index a18a99c7e220f..5b79aa8653923 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 4ab6801f66b92..1b9d10bff58e9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Mul}. * This class is generated. Do not edit it. 
*/ public final class MulDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public MulDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public MulDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public MulDoublesEvaluator get(DriverContext context) { - return new MulDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new MulDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index 9926668c5e505..7501d0fc505a1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index 8be74005e1940..383e55755917d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index 4ba489dc65f06..95ecaee6b34ac 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index 330b3afa3df19..5915d4d476f19 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,17 +14,23 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Neg}. * This class is generated. Do not edit it. 
*/ public final class NegDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator v; private final DriverContext driverContext; - public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverContext) { + public NegDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator v, + DriverContext driverContext) { + this.warnings = new Warnings(source); this.v = v; this.driverContext = driverContext; } @@ -42,7 +49,14 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock vBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -72,15 +86,18 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory v; - public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; this.v = v; } @Override public NegDoublesEvaluator get(DriverContext context) { - return new NegDoublesEvaluator(v.get(context), context); + return new NegDoublesEvaluator(source, v.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index 9691099b03924..1821406f061bd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock vBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index 4d8ee14d4569b..49a0096665112 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -5,6 +5,7 @@ package 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -49,7 +50,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock vBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (vBlock.isNull(p) || vBlock.getValueCount(p) != 1) { + if (vBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (vBlock.getValueCount(p) != 1) { + if (vBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index de81736c42abf..88d94573b7562 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import java.time.DateTimeException; @@ -54,7 +55,14 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock datetimeBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for 
(int p = 0; p < positionCount; p++) { - if (datetimeBlock.isNull(p) || datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (datetimeBlock.getValueCount(p) != 1) { + if (datetimeBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 6609d6cfbb4ae..d479d0fe751c9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -4,6 +4,7 @@ // 2.0. package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -13,20 +14,25 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Sub}. * This class is generated. Do not edit it. 
*/ public final class SubDoublesEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; - public SubDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, + public SubDoublesEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { + this.warnings = new Warnings(source); this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; @@ -52,11 +58,25 @@ public Block eval(Page page) { public DoubleBlock eval(int positionCount, DoubleBlock lhsBlock, DoubleBlock rhsBlock) { try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } @@ -86,19 +106,22 @@ public void close() { } static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + private final EvalOperator.ExpressionEvaluator.Factory lhs; private final EvalOperator.ExpressionEvaluator.Factory rhs; - public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; this.lhs = lhs; this.rhs = rhs; } @Override public SubDoublesEvaluator get(DriverContext context) { - return new SubDoublesEvaluator(lhs.get(context), rhs.get(context), context); + return new SubDoublesEvaluator(source, lhs.get(context), rhs.get(context), context); } @Override diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 4013cdd240dd0..72bd7e4b6848a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public IntBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(IntBlock.Builder result = driverContext.blockFactory().newIntBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } 
+ if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index 7528750da15f8..88cb6bf287d8d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if 
(rhsBlock.getValueCount(p) != 1) { + if (rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index 6c2a31db0a6f0..1ef9034d76f62 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -5,6 +5,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; import org.elasticsearch.compute.data.Block; @@ -58,11 +59,25 @@ public Block eval(Page page) { public LongBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) { try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - if (lhsBlock.isNull(p) || lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.isNull(p)) { result.appendNull(); continue position; } - if (rhsBlock.isNull(p) || rhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) != 1) { + if (lhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rhsBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rhsBlock.getValueCount(p) != 1) { + if 
(rhsBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } result.appendNull(); continue position; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 132df0d3a5afd..280ef898c3b90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -154,6 +154,7 @@ static class Nots extends ExpressionMapper { public ExpressionEvaluator.Factory map(Not not, Layout layout) { var expEval = toEvaluator(not.field(), layout); return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.logical.NotEvaluator( + not.source(), expEval.get(dvrCtx), dvrCtx ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index d5a3e1cc6244c..36c19825fab85 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; @@ -16,11 +17,10 @@ import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import 
org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; -import java.util.function.BiFunction; - import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; public abstract class ComparisonMapper extends ExpressionMapper { @@ -74,18 +74,18 @@ public abstract class ComparisonMapper extends Expre ) { }; - private final BiFunction ints; - private final BiFunction longs; - private final BiFunction doubles; - private final BiFunction keywords; - private final BiFunction bools; + private final TriFunction ints; + private final TriFunction longs; + private final TriFunction doubles; + private final TriFunction keywords; + private final TriFunction bools; private ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords, - BiFunction bools + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords, + TriFunction bools ) { this.ints = ints; this.longs = longs; @@ -95,16 +95,16 @@ private ComparisonMapper( } ComparisonMapper( - BiFunction ints, - BiFunction longs, - BiFunction doubles, - BiFunction keywords + TriFunction ints, + TriFunction longs, + TriFunction doubles, + TriFunction keywords ) { this.ints = ints; this.longs = longs; this.doubles = doubles; this.keywords = keywords; - this.bools = (lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; + this.bools = (source, lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; } @Override @@ -129,20 +129,20 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) var leftEval = toEvaluator(bc.left(), layout); var rightEval = toEvaluator(bc.right(), layout); if (leftType == DataTypes.KEYWORD || 
leftType == DataTypes.TEXT || leftType == DataTypes.IP || leftType == DataTypes.VERSION) { - return keywords.apply(leftEval, rightEval); + return keywords.apply(bc.source(), leftEval, rightEval); } if (leftType == DataTypes.BOOLEAN) { - return bools.apply(leftEval, rightEval); + return bools.apply(bc.source(), leftEval, rightEval); } if (leftType == DataTypes.DATETIME) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } if (leftType == EsqlDataTypes.GEO_POINT) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } // TODO: Perhaps neithger geo_point, not cartesian_point should support comparisons? if (leftType == EsqlDataTypes.CARTESIAN_POINT) { - return longs.apply(leftEval, rightEval); + return longs.apply(bc.source(), leftEval, rightEval); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } @@ -151,10 +151,10 @@ public static ExpressionEvaluator.Factory castToEvaluator( BinaryOperator op, Layout layout, DataType required, - BiFunction factory + TriFunction factory ) { - var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); - var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return factory.apply(lhs, rhs); + var lhs = Cast.cast(op.source(), op.left().dataType(), required, toEvaluator(op.left(), layout)); + var rhs = Cast.cast(op.source(), op.right().dataType(), required, toEvaluator(op.right(), layout)); + return factory.apply(op.source(), lhs, rhs); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java index c0fa71a59e415..f37751e18858f 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMapper.java @@ -20,7 +20,8 @@ public abstract class RegexMapper extends ExpressionMapper> { public static final ExpressionMapper REGEX_MATCH = new RegexMapper() { @Override public ExpressionEvaluator.Factory map(RegexMatch expression, Layout layout) { - return dvrCtx -> new org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RegexMatchEvaluator( + return dvrCtx -> new RegexMatchEvaluator( + expression.source(), EvalMapper.toEvaluator(expression.field(), layout).get(dvrCtx), new CharacterRunAutomaton(((AbstractStringPattern) expression.pattern()).createAutomaton()), dvrCtx diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 948e44f946920..25477e501645d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -110,16 +110,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMax(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return new GreatestBooleanEvaluator.Factory(factories); + return new GreatestBooleanEvaluator.Factory(source(), factories); } if (dataType == DataTypes.DOUBLE) { - return new GreatestDoubleEvaluator.Factory(factories); + return new GreatestDoubleEvaluator.Factory(source(), factories); } if (dataType == DataTypes.INTEGER) { - return new GreatestIntEvaluator.Factory(factories); + return new GreatestIntEvaluator.Factory(source(), factories); } if 
(dataType == DataTypes.LONG) { - return new GreatestLongEvaluator.Factory(factories); + return new GreatestLongEvaluator.Factory(source(), factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator.apply(new MvMin(e.source(), e))) .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return new LeastBooleanEvaluator.Factory(factories); + return new LeastBooleanEvaluator.Factory(source(), factories); } if (dataType == DataTypes.DOUBLE) { - return new LeastDoubleEvaluator.Factory(factories); + return new LeastDoubleEvaluator.Factory(source(), factories); } if (dataType == DataTypes.INTEGER) { - return new LeastIntEvaluator.Factory(factories); + return new LeastIntEvaluator.Factory(source(), factories); } if (dataType == DataTypes.LONG) { - return new LeastLongEvaluator.Factory(factories); + return new LeastLongEvaluator.Factory(source(), factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -127,7 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var fieldEvaluator = toEvaluator.apply(field); if (format == null) { - return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), UTC_DATE_TIME_FORMATTER, dvrCtx); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for format [" + format.dataType() + "]"); } if (format.foldable()) { DateFormatter formatter = toFormatter(format.fold(), ((EsqlConfiguration) configuration()).locale()); - return dvrCtx -> new DateFormatConstantEvaluator(fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return dvrCtx -> new DateFormatConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); } var formatEvaluator = toEvaluator.apply(format); 
return dvrCtx -> new DateFormatEvaluator( + source(), fieldEvaluator.get(dvrCtx), formatEvaluator.get(dvrCtx), ((EsqlConfiguration) configuration()).locale(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 0c70c9065dfc4..e5063bc0cbab4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -149,10 +149,14 @@ public ExpressionEvaluator.Factory toEvaluator(Function info() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return dvrCtx -> new NowEvaluator(now, dvrCtx); + return dvrCtx -> new NowEvaluator(source(), now, dvrCtx); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 36d889ea1b19a..f47637ced2e90 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -58,6 +58,7 @@ public CIDRMatch(Source source, Expression ipField, List matches) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); return dvrCtx -> new CIDRMatchEvaluator( + source(), ipEvaluatorSupplier.get(dvrCtx), matches.stream().map(x -> toEvaluator.apply(x).get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), dvrCtx diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index 90766a95e9cc0..8bc3ba3b184e9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -52,16 +52,16 @@ static int process(int fieldVal) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); if (dataType() == DataTypes.DOUBLE) { - return new AbsDoubleEvaluator.Factory(field); + return new AbsDoubleEvaluator.Factory(source(), field); } if (dataType() == DataTypes.UNSIGNED_LONG) { return field; } if (dataType() == DataTypes.LONG) { - return new AbsLongEvaluator.Factory(field); + return new AbsLongEvaluator.Factory(source(), field); } if (dataType() == DataTypes.INTEGER) { - return new AbsIntEvaluator.Factory(field); + return new AbsIntEvaluator.Factory(source(), field); } throw EsqlIllegalArgumentException.illegalDataType(dataType()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index 08a842e8b9fd7..305228362a9d0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -33,7 +33,7 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction impleme @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return doubleEvaluator(Cast.cast(field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); + return doubleEvaluator(Cast.cast(source(), 
field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index f730b3358a7f1..88079e60fa66a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -28,7 +28,7 @@ public Atan(Source source, @Param(name = "n", type = { "integer", "long", "doubl @Override protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { - return new AtanEvaluator.Factory(field); + return new AtanEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index 31fdea6e0d00c..e754aff1853b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -85,9 +85,9 @@ public boolean foldable() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var yEval = Cast.cast(y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); - var xEval = Cast.cast(x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); - return new Atan2Evaluator.Factory(yEval, xEval); + var yEval = Cast.cast(source(), y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); + var xEval = Cast.cast(source(), x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); + return new Atan2Evaluator.Factory(source(), yEval, xEval); } @Override diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 33115352d9e54..27abeb44b2ff0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -115,7 +115,11 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return new IsFiniteEvaluator.Factory(field); + return new IsFiniteEvaluator.Factory(source(), field); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java index 70e8137d8871e..80068f3aaf8d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java @@ -23,7 +23,7 @@ public IsInfinite(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new IsInfiniteEvaluator.Factory(toEvaluator.apply(field())); + return new IsInfiniteEvaluator.Factory(source(), toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java index 4db5534631fc9..07875987f74d7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java @@ -23,7 +23,7 @@ public IsNaN(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new IsNaNEvaluator.Factory(toEvaluator.apply(field())); + return new IsNaNEvaluator.Factory(source(), toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 9e160e7c2f15f..0658dcccbbb48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -112,8 +112,8 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var baseEval = Cast.cast(base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); - var expEval = Cast.cast(exponent.dataType(), DataTypes.DOUBLE, toEvaluator.apply(exponent)); + var baseEval = Cast.cast(source(), base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); + var expEval = Cast.cast(source(), exponent.dataType(), DataTypes.DOUBLE, toEvaluator.apply(exponent)); return new PowEvaluator.Factory(source(), baseEval, expEval); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 4e1d12606a34f..3cbc74b3b6c28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -7,6 +7,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.math; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; @@ -38,6 +39,8 @@ public class Round extends ScalarFunction implements OptionalArgument, EvaluatorMapper { + private static final BiFunction EVALUATOR_IDENTITY = (s, e) -> e; + private final Expression field, decimals; public Round(Source source, Expression field, Expression decimals) { @@ -137,28 +140,28 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator, - Function noDecimals, - BiFunction withDecimals + BiFunction noDecimals, + TriFunction withDecimals ) { var fieldEvaluator = toEvaluator.apply(field()); if (decimals == null) { - return noDecimals.apply(fieldEvaluator); + return noDecimals.apply(source(), fieldEvaluator); } - var decimalsEvaluator = Cast.cast(decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); - return withDecimals.apply(fieldEvaluator, decimalsEvaluator); + var decimalsEvaluator = Cast.cast(source(), decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); + return withDecimals.apply(source(), fieldEvaluator, decimalsEvaluator); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index eaf632ee8c40e..7487d8df90395 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -32,7 +32,7 @@ public Sin( @Override protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { - return new 
SinEvaluator.Factory(field); + return new SinEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index dc9e3bc2b3fde..bdaf3a9498b09 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -46,7 +46,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var values = children().stream().map(toEvaluator).toArray(ExpressionEvaluator.Factory[]::new); - return new ConcatEvaluator.Factory(context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); + return new ConcatEvaluator.Factory(source(), context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 1140bfcf1f5d9..250cbfad69b39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -98,6 +98,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new EndsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(suffix)); + return new EndsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(suffix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index 382f64fcf831c..bcb4b81c452f3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -50,7 +50,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new LTrimEvaluator.Factory(toEvaluator.apply(field())); + return new LTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 14cb03943f520..65d3a6388f790 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -74,6 +74,7 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return new LeftEvaluator.Factory( + source, context -> new BytesRef(), context -> new UnicodeUtil.UTF8CodePoint(), toEvaluator.apply(str), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index 47ee8f20e7f32..9f944c62af6a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -72,6 +72,6 @@ protected NodeInfo info() { @Override public ExpressionEvaluator.Factory 
toEvaluator(Function toEvaluator) { - return new LengthEvaluator.Factory(toEvaluator.apply(field())); + return new LengthEvaluator.Factory(source(), toEvaluator.apply(field())); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 98fc93b4f6acc..f694999ec2767 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -50,7 +50,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new RTrimEvaluator.Factory(toEvaluator.apply(field())); + return new RTrimEvaluator.Factory(source(), toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index f77c703e7cb0c..7e96f7a396472 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -78,6 +78,7 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { return new RightEvaluator.Factory( + source, context -> new BytesRef(), context -> new UnicodeUtil.UTF8CodePoint(), toEvaluator.apply(str), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 7f18be0e7b18e..950486b1b0eed 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -122,12 +122,12 @@ protected NodeInfo info() { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var str = toEvaluator.apply(left()); if (right().foldable() == false) { - return new SplitVariableEvaluator.Factory(str, toEvaluator.apply(right()), context -> new BytesRef()); + return new SplitVariableEvaluator.Factory(source(), str, toEvaluator.apply(right()), context -> new BytesRef()); } BytesRef delim = (BytesRef) right().fold(); if (delim.length != 1) { throw new QlIllegalArgumentException("for now delimiter must be a single byte"); } - return new SplitSingleByteEvaluator.Factory(str, delim.bytes[delim.offset], context -> new BytesRef()); + return new SplitSingleByteEvaluator.Factory(source(), str, delim.bytes[delim.offset], context -> new BytesRef()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 3497d9360b187..d78ad3df64d1f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -91,6 +91,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return new StartsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(prefix)); + return new StartsWithEvaluator.Factory(source(), toEvaluator.apply(str), toEvaluator.apply(prefix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 261b7aeb19da2..c287aeafc8d80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -135,9 +135,9 @@ public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return new TrimEvaluator.Factory(field); + return new TrimEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java index 1e1da2634fadf..4439c4ebc754e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java @@ -35,7 +35,7 @@ public Add(Source source, Expression left, Expression right) { AddIntsEvaluator.Factory::new, AddLongsEvaluator.Factory::new, AddUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new AddDoublesEvaluator.Factory(lhs, rhs), + (s, lhs, rhs) -> new AddDoublesEvaluator.Factory(source, lhs, rhs), AddDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index 0bcbe21c60a63..42fd526cb3b99 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -34,7 +34,7 @@ public Div(Source source, Expression left, Expression right, DataType type) { DivIntsEvaluator.Factory::new, DivLongsEvaluator.Factory::new, DivUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new DivDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new DivDoublesEvaluator.Factory(source, lhs, rhs) ); this.type = type; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index dc5be3373198b..5f8006d10f6cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -115,8 +115,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function new ModDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new ModDoublesEvaluator.Factory(source, lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java index 963f09486a361..3eb78ef953f8f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java @@ -30,7 +30,7 @@ public Mul(Source source, Expression left, Expression right) { MulIntsEvaluator.Factory::new, MulLongsEvaluator.Factory::new, MulUnsignedLongsEvaluator.Factory::new, - (s, lhs, rhs) -> new 
MulDoublesEvaluator.Factory(lhs, rhs) + (s, lhs, rhs) -> new MulDoublesEvaluator.Factory(source, lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 2ad5c5b9de5b5..37c8d6dd652e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -55,7 +55,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new SubDoublesEvaluator.Factory(lhs, rhs), + (s, lhs, rhs) -> new SubDoublesEvaluator.Factory(source, lhs, rhs), SubDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index 0197ecd4f5a24..bdd93d733a460 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanNamedReader; import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry.PlanReader; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; @@ -24,11 +23,9 @@ import org.elasticsearch.xpack.ql.expression.NameId; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; -import 
org.elasticsearch.xpack.ql.tree.Location; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.EsField; -import org.elasticsearch.xpack.ql.util.StringUtils; import java.io.IOException; import java.util.Collection; @@ -38,6 +35,8 @@ import java.util.function.LongFunction; import java.util.function.Supplier; +import static org.elasticsearch.xpack.ql.util.SourceUtils.readSourceWithText; + /** * A customized stream input used to deserialize ESQL physical plan fragments. Complements stream * input with methods that read plan nodes, Attributes, Expressions, etc. @@ -107,44 +106,7 @@ public PhysicalPlan readPhysicalPlanNode() throws IOException { public Source readSource() throws IOException { boolean hasSource = readBoolean(); - if (hasSource) { - int line = readInt(); - int column = readInt(); - int length = readInt(); - int charPositionInLine = column - 1; - return new Source(new Location(line, charPositionInLine), sourceText(configuration.query(), line, column, length)); - } - return Source.EMPTY; - } - - private static String sourceText(String query, int line, int column, int length) { - if (line <= 0 || column <= 0 || query.isEmpty()) { - return StringUtils.EMPTY; - } - int offset = textOffset(query, line, column); - if (offset + length > query.length()) { - throw new EsqlIllegalArgumentException( - "location [@" + line + ":" + column + "] and length [" + length + "] overrun query size [" + query.length() + "]" - ); - } - return query.substring(offset, offset + length); - } - - private static int textOffset(String query, int line, int column) { - int offset = 0; - if (line > 1) { - String[] lines = query.split("\n"); - if (line > lines.length) { - throw new EsqlIllegalArgumentException( - "line location [" + line + "] higher than max [" + lines.length + "] in query [" + query + "]" - ); - } - for (int i = 0; i < line - 1; i++) { - offset += lines[i].length() + 1; // +1 accounts for the 
removed \n - } - } - offset += column - 1; // -1 since column is 1-based indexed - return offset; + return hasSource ? readSourceWithText(this, configuration.query()) : Source.EMPTY; } public Expression readExpression() throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index 846c28b1ce719..66bd4163013ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -20,6 +20,8 @@ import java.io.IOException; import java.util.function.Function; +import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSourceNoText; + /** * A customized stream output used to serialize ESQL physical plan fragments. Complements stream * output with methods that write plan nodes, Attributes, Expressions, etc. 
@@ -52,9 +54,7 @@ public void writePhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException public void writeSource(Source source) throws IOException { writeBoolean(true); - writeInt(source.source().getLineNumber()); - writeInt(source.source().getColumnNumber()); - writeInt(source.text().length()); + writeSourceNoText(this, source); } public void writeNoSource() throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java index e419be2b7e1fc..0e481c3dd762b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java @@ -41,12 +41,16 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.esql.expression.function.Warnings; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.tree.Source; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.xpack.ql.util.SourceUtils.readSource; +import static org.elasticsearch.xpack.ql.util.SourceUtils.writeSource; + /** * Lucene query that wraps another query and only selects documents that match * the wrapped query and have a single field value. 
@@ -68,6 +72,8 @@ public class SingleValueQuery extends Query { Builder::new ); + public static final String MULTI_VALUE_WARNING = "single-value function encountered multi-value"; + private final Query next; private final String field; @@ -94,7 +100,7 @@ public void enrichNestedSort(NestedSortBuilder sort) { @Override public Builder asBuilder() { - return new Builder(next.asBuilder(), field, new Stats()); + return new Builder(next.asBuilder(), field, new Stats(), next.source()); } @Override @@ -125,11 +131,13 @@ public static class Builder extends AbstractQueryBuilder { private final QueryBuilder next; private final String field; private final Stats stats; + private final Source source; - Builder(QueryBuilder next, String field, Stats stats) { + Builder(QueryBuilder next, String field, Stats stats, Source source) { this.next = next; this.field = field; this.stats = stats; + this.source = source; } Builder(StreamInput in) throws IOException { @@ -137,12 +145,21 @@ public static class Builder extends AbstractQueryBuilder { this.next = in.readNamedWriteable(QueryBuilder.class); this.field = in.readString(); this.stats = new Stats(); + if (in.getTransportVersion().onOrAfter(TransportVersions.SOURCE_IN_SINGLE_VALUE_QUERY_ADDED)) { + this.source = readSource(in); + } else { + this.source = Source.EMPTY; + + } } @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeNamedWriteable(next); out.writeString(field); + if (out.getTransportVersion().onOrAfter(TransportVersions.SOURCE_IN_SINGLE_VALUE_QUERY_ADDED)) { + writeSource(out, source); + } } public QueryBuilder next() { @@ -153,6 +170,10 @@ public String field() { return field; } + public Source source() { + return source; + } + @Override public String getWriteableName() { return ENTRY.name; @@ -163,6 +184,7 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.startObject(ENTRY.name); builder.field("field", field); builder.field("next", next, params); + 
builder.field("source", source.toString()); builder.endObject(); } @@ -178,7 +200,12 @@ protected org.apache.lucene.search.Query doToQuery(SearchExecutionContext contex stats.missingField++; return new MatchNoDocsQuery("missing field [" + field + "]"); } - return new LuceneQuery(next.toQuery(context), context.getForField(ft, MappedFieldType.FielddataOperation.SEARCH), stats); + return new LuceneQuery( + next.toQuery(context), + context.getForField(ft, MappedFieldType.FielddataOperation.SEARCH), + stats, + new Warnings(source) + ); } @Override @@ -191,7 +218,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws if (rewritten == next) { return this; } - return new Builder(rewritten, field, stats); + return new Builder(rewritten, field, stats, source); } @Override @@ -210,14 +237,16 @@ Stats stats() { } private static class LuceneQuery extends org.apache.lucene.search.Query { - private final org.apache.lucene.search.Query next; + final org.apache.lucene.search.Query next; private final IndexFieldData fieldData; private final Stats stats; + private final Warnings warnings; - LuceneQuery(org.apache.lucene.search.Query next, IndexFieldData fieldData, Stats stats) { + LuceneQuery(org.apache.lucene.search.Query next, IndexFieldData fieldData, Stats stats, Warnings warnings) { this.next = next; this.fieldData = fieldData; this.stats = stats; + this.warnings = warnings; } @Override @@ -237,12 +266,12 @@ public org.apache.lucene.search.Query rewrite(IndexReader reader) throws IOExcep if (rewritten == next) { return this; } - return new LuceneQuery(rewritten, fieldData, stats); + return new LuceneQuery(rewritten, fieldData, stats, warnings); } @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { - return new SingleValueWeight(this, next.createWeight(searcher, scoreMode, boost), fieldData); + return new SingleValueWeight(this, next.createWeight(searcher, scoreMode, boost), fieldData, 
warnings); } @Override @@ -254,12 +283,14 @@ public boolean equals(Object obj) { return false; } SingleValueQuery.LuceneQuery other = (SingleValueQuery.LuceneQuery) obj; - return next.equals(other.next) && fieldData.getFieldName().equals(other.fieldData.getFieldName()); + return next.equals(other.next) + && fieldData.getFieldName().equals(other.fieldData.getFieldName()) + && warnings.equals(other.warnings); } @Override public int hashCode() { - return Objects.hash(classHash(), next, fieldData); + return Objects.hash(classHash(), next, fieldData, warnings); } @Override @@ -278,12 +309,14 @@ private static class SingleValueWeight extends Weight { private final Stats stats; private final Weight next; private final IndexFieldData fieldData; + private final Warnings warnings; - private SingleValueWeight(SingleValueQuery.LuceneQuery query, Weight next, IndexFieldData fieldData) { + private SingleValueWeight(SingleValueQuery.LuceneQuery query, Weight next, IndexFieldData fieldData, Warnings warnings) { super(query); this.stats = query.stats; this.next = next; this.fieldData = fieldData; + this.warnings = warnings; } @Override @@ -354,14 +387,14 @@ private Scorer scorer(LeafReaderContext context, Scorer nextScorer, LeafNumericF return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries(nextScorer.iterator(), sortedNumerics) + new TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries(nextScorer.iterator(), sortedNumerics, warnings) ); } stats.numericMultiApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(nextIterator, sortedNumerics) + new TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(nextIterator, sortedNumerics, warnings) ); } @@ -388,14 +421,14 @@ private Scorer scorer(LeafReaderContext context, Scorer nextScorer, LeafOrdinals return new SingleValueQueryScorer( this, nextScorer, - new 
TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(nextScorer.iterator(), sortedSet) + new TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(nextScorer.iterator(), sortedSet, warnings) ); } stats.ordinalsMultiApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(nextIterator, sortedSet) + new TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(nextIterator, sortedSet, warnings) ); } @@ -407,14 +440,14 @@ private Scorer scorer(Scorer nextScorer, LeafFieldData lfd) { return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(nextScorer.iterator(), sortedBinary) + new TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(nextScorer.iterator(), sortedBinary, warnings) ); } stats.bytesApprox++; return new SingleValueQueryScorer( this, nextScorer, - new TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(nextIterator, sortedBinary) + new TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(nextIterator, sortedBinary, warnings) ); } @@ -469,13 +502,16 @@ public int docID() { private static class TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedNumericDocValues sortedNumerics; + private final Warnings warnings; private TwoPhaseIteratorForSortedNumericsAndSinglePhaseQueries( DocIdSetIterator approximation, - SortedNumericDocValues sortedNumerics + SortedNumericDocValues sortedNumerics, + Warnings warning ) { super(approximation); this.sortedNumerics = sortedNumerics; + this.warnings = warning; } @Override @@ -483,7 +519,11 @@ public boolean matches() throws IOException { if (false == sortedNumerics.advanceExact(approximation.docID())) { return false; } - return sortedNumerics.docValueCount() == 1; + if (sortedNumerics.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -495,11 +535,17 @@ public float matchCost() { 
private static class TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries extends TwoPhaseIterator { private final SortedNumericDocValues sortedNumerics; private final TwoPhaseIterator next; + private final Warnings warnings; - private TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries(TwoPhaseIterator next, SortedNumericDocValues sortedNumerics) { + private TwoPhaseIteratorForSortedNumericsAndTwoPhaseQueries( + TwoPhaseIterator next, + SortedNumericDocValues sortedNumerics, + Warnings warnings + ) { super(next.approximation()); this.sortedNumerics = sortedNumerics; this.next = next; + this.warnings = warnings; } @Override @@ -508,6 +554,7 @@ public boolean matches() throws IOException { return false; } if (sortedNumerics.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); @@ -521,10 +568,16 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedBinaryDocValues sortedBinary; + private final Warnings warnings; - private TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries(DocIdSetIterator approximation, SortedBinaryDocValues sortedBinary) { + private TwoPhaseIteratorForSortedBinaryAndSinglePhaseQueries( + DocIdSetIterator approximation, + SortedBinaryDocValues sortedBinary, + Warnings warnings + ) { super(approximation); this.sortedBinary = sortedBinary; + this.warnings = warnings; } @Override @@ -532,7 +585,11 @@ public boolean matches() throws IOException { if (false == sortedBinary.advanceExact(approximation.docID())) { return false; } - return sortedBinary.docValueCount() == 1; + if (sortedBinary.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -544,11 +601,13 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedSetAndTwoPhaseQueries extends 
TwoPhaseIterator { private final SortedSetDocValues sortedSet; private final TwoPhaseIterator next; + private final Warnings warnings; - private TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(TwoPhaseIterator next, SortedSetDocValues sortedSet) { + private TwoPhaseIteratorForSortedSetAndTwoPhaseQueries(TwoPhaseIterator next, SortedSetDocValues sortedSet, Warnings warnings) { super(next.approximation()); this.sortedSet = sortedSet; this.next = next; + this.warnings = warnings; } @Override @@ -557,6 +616,7 @@ public boolean matches() throws IOException { return false; } if (sortedSet.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); @@ -570,10 +630,16 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedSetAndSinglePhaseQueries extends TwoPhaseIterator { private final SortedSetDocValues sortedSet; + private final Warnings warnings; - private TwoPhaseIteratorForSortedSetAndSinglePhaseQueries(DocIdSetIterator approximation, SortedSetDocValues sortedSet) { + private TwoPhaseIteratorForSortedSetAndSinglePhaseQueries( + DocIdSetIterator approximation, + SortedSetDocValues sortedSet, + Warnings warnings + ) { super(approximation); this.sortedSet = sortedSet; + this.warnings = warnings; } @Override @@ -581,7 +647,11 @@ public boolean matches() throws IOException { if (false == sortedSet.advanceExact(approximation.docID())) { return false; } - return sortedSet.docValueCount() == 1; + if (sortedSet.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); + return false; + } + return true; } @Override @@ -593,11 +663,17 @@ public float matchCost() { private static class TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries extends TwoPhaseIterator { private final SortedBinaryDocValues sortedBinary; private final TwoPhaseIterator next; + private final Warnings warnings; - private 
TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries(TwoPhaseIterator next, SortedBinaryDocValues sortedBinary) { + private TwoPhaseIteratorForSortedBinaryAndTwoPhaseQueries( + TwoPhaseIterator next, + SortedBinaryDocValues sortedBinary, + Warnings warnings + ) { super(next.approximation()); this.sortedBinary = sortedBinary; this.next = next; + this.warnings = warnings; } @Override @@ -606,6 +682,7 @@ public boolean matches() throws IOException { return false; } if (sortedBinary.docValueCount() != 1) { + warnings.registerException(new IllegalArgumentException(MULTI_VALUE_WARNING)); return false; } return next.matches(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 99b21225e1985..dbb7c1f130a1b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -441,6 +441,6 @@ private void assertWarnings(List warnings) { normalized.add(normW); } } - assertMap(normalized, matchesList(testCase.expectedWarnings)); + assertMap(normalized, matchesList(testCase.expectedWarnings(true))); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index f003170a7551d..3bac4f1c4b5c0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -436,6 +436,15 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assertSimpleWithNulls(data, block, i); } } + + // Note: the null-in-fast-null-out handling prevents any exception from being thrown, so the warnings 
provided in some test + // cases won't actually be registered. This isn't an issue for unary functions, but could be an issue for n-ary ones, if + // function processing of the first parameter(s) could raise an exception/warning. (But hasn't been the case so far.) + // For n-ary functions, dealing with one multivalue (before hitting the null parameter injected above) will now trigger + // a warning ("SV-function encountered a MV") that thus needs to be checked. + if (simpleData.stream().anyMatch(List.class::isInstance) && testCase.getExpectedWarnings() != null) { + assertWarnings(testCase.getExpectedWarnings()); + } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index 5ef485e8ba441..6bf816ce4c734 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -70,7 +70,8 @@ public static Iterable parameters() { "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", DataTypes.DOUBLE, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 0189d29e5e717..cc52a965e70b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -179,7 +179,8 @@ public static Iterable parameters() { "AddIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.INTEGER, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index e45a86375fd4e..db924d0d68c53 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -155,7 +155,8 @@ public static Iterable parameters() { "SubIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataTypes.INTEGER, is(nullValue()) - ); + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 8b185e013a8a5..12b8185cbec5d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; @@ -257,7 +259,8 @@ public void testCountOneFieldWithFilter() { assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); var stat = as(esStatsQuery.stats().get(0), Stat.class); assertThat(stat.query(), is(QueryBuilders.existsQuery("salary"))); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("salary").gt(1000), "salary"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("salary").gt(1000), "salary", source); 
assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -297,7 +300,8 @@ public void testAnotherCountAllWithFilter() { var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); assertThat(esStatsQuery.limit(), is(nullValue())); assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen")); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", source); assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -308,8 +312,8 @@ public void testAnotherCountAllWithFilter() { * \_AggregateExec[[],[COUNT([2a][KEYWORD]) AS c, COUNT(1[INTEGER]) AS c_literal],FINAL,null] * \_ExchangeExec[[count{r}#18, seen{r}#19, count{r}#20, seen{r}#21],true] * \_EsStatsQueryExec[test], stats[Stat[name=*, type=COUNT, query=null], Stat[name=*, type=COUNT, query=null]]], - * query[{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}}}] - * [count{r}#23, seen{r}#24, count{r}#25, seen{r}#26], limit[], + * query[{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}, + * "source":"emp_no > 10010@2:9"}}][count{r}#23, seen{r}#24, count{r}#25, seen{r}#26], limit[], */ public void testMultiCountAllWithFilter() { var plan = plan(""" @@ -331,7 +335,8 @@ public void testMultiCountAllWithFilter() { var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); assertThat(esStatsQuery.limit(), is(nullValue())); assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen", "count", "seen")); - var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); + var source = ((SingleValueQuery.Builder) esStatsQuery.query()).source(); + var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no", 
source); assertThat(expected.toString(), is(esStatsQuery.query().toString())); } @@ -376,8 +381,8 @@ public boolean exists(String field) { assertThat(Expressions.names(localSource.output()), contains("count", "seen")); } - private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName) { - return FilterTests.singleValueQuery(inner, fieldName); + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName, Source source) { + return FilterTests.singleValueQuery(inner, fieldName, source); } private Stat queryStatsFor(PhysicalPlan plan) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index 774ac24d3cd02..926f9dd27f84f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; @@ -31,6 +32,7 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; +import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.EsField; import org.elasticsearch.xpack.ql.util.Queries; import org.junit.BeforeClass; @@ -49,12 +51,13 @@ import static org.elasticsearch.xpack.ql.util.Queries.Clause.FILTER; import static org.elasticsearch.xpack.ql.util.Queries.Clause.MUST; import static org.elasticsearch.xpack.ql.util.Queries.Clause.SHOULD; +import static 
org.elasticsearch.xpack.ql.util.SourceUtils.writeSource; import static org.hamcrest.Matchers.nullValue; public class FilterTests extends ESTestCase { // use a field that already exists in the mapping - private static final String AT_TIMESTAMP = "emp_no"; + private static final String EMP_NO = "emp_no"; private static final String OTHER_FIELD = "salary"; private static EsqlParser parser; @@ -82,7 +85,7 @@ public static void init() { } public void testTimestampRequestFilterNoQueryFilter() { - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test @@ -99,24 +102,29 @@ public void testTimestampNoRequestFilterQueryFilter() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} - """, AT_TIMESTAMP, value), null); + """, EMP_NO, value), null); var filter = filterQueryForTransportNodes(plan); - var expected = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(value), AT_TIMESTAMP); + var expected = singleValueQuery(rangeQuery(EMP_NO).gt(value), EMP_NO, ((SingleValueQuery.Builder) filter).source()); assertEquals(expected.toString(), filter.toString()); } public void testTimestampRequestFilterQueryFilter() { var value = 10; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > 10 - """, AT_TIMESTAMP, value), restFilter); + """, EMP_NO, value), restFilter); var filter = filterQueryForTransportNodes(plan); - var queryFilter = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(value).includeUpper(false), AT_TIMESTAMP); + var builder = ((BoolQueryBuilder) filter).filter().get(1); + var queryFilter = singleValueQuery( + rangeQuery(EMP_NO).gt(value).includeUpper(false), + EMP_NO, + ((SingleValueQuery.Builder) builder).source() + ); var expected = Queries.combine(FILTER, asList(restFilter, queryFilter)); assertEquals(expected.toString(), filter.toString()); 
} @@ -124,16 +132,17 @@ public void testTimestampRequestFilterQueryFilter() { public void testTimestampRequestFilterQueryFilterWithConjunction() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} AND {} < {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must(); + var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(1)).source()); var must = Queries.combine(MUST, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, must)); assertEquals(expected.toString(), filter.toString()); @@ -142,12 +151,12 @@ public void testTimestampRequestFilterQueryFilterWithConjunction() { public void testTimestampRequestFilterQueryFilterWithDisjunctionOnDifferentFields() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} OR {} < {} - """, OTHER_FIELD, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, OTHER_FIELD, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); var expected = restFilter; @@ -157,16 +166,17 @@ public void testTimestampRequestFilterQueryFilterWithDisjunctionOnDifferentField public void 
testTimestampRequestFilterQueryFilterWithDisjunctionOnSameField() { var lowValue = 10; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} OR {} < {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var shoulds = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).should(); + var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) shoulds.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) shoulds.get(1)).source()); var should = Queries.combine(SHOULD, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, should)); assertEquals(expected.toString(), filter.toString()); @@ -176,16 +186,17 @@ public void testTimestampRequestFilterQueryFilterWithMultiConjunction() { var lowValue = 10; var highValue = 100; var eqValue = 1234; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} AND {} == {} AND {} < {} - """, AT_TIMESTAMP, lowValue, OTHER_FIELD, eqValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, OTHER_FIELD, eqValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var left = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); - var right = singleValueQuery(rangeQuery(AT_TIMESTAMP).lt(highValue), AT_TIMESTAMP); + var musts = ((BoolQueryBuilder) ((BoolQueryBuilder) filter).filter().get(1)).must(); + 
var left = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(0)).source()); + var right = singleValueQuery(rangeQuery(EMP_NO).lt(highValue), EMP_NO, ((SingleValueQuery.Builder) musts.get(1)).source()); var must = Queries.combine(MUST, asList(left, right)); var expected = Queries.combine(FILTER, asList(restFilter, must)); assertEquals(expected.toString(), filter.toString()); @@ -196,17 +207,18 @@ public void testTimestampRequestFilterQueryMultipleFilters() { var eqValue = 1234; var highValue = 100; - var restFilter = restFilterQuery(AT_TIMESTAMP); + var restFilter = restFilterQuery(EMP_NO); var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE {} > {} |EVAL {} = {} |WHERE {} > {} - """, AT_TIMESTAMP, lowValue, AT_TIMESTAMP, eqValue, AT_TIMESTAMP, highValue), restFilter); + """, EMP_NO, lowValue, EMP_NO, eqValue, EMP_NO, highValue), restFilter); var filter = filterQueryForTransportNodes(plan); - var queryFilter = singleValueQuery(rangeQuery(AT_TIMESTAMP).gt(lowValue), AT_TIMESTAMP); + var builder = ((BoolQueryBuilder) filter).filter().get(1); + var queryFilter = singleValueQuery(rangeQuery(EMP_NO).gt(lowValue), EMP_NO, ((SingleValueQuery.Builder) builder).source()); var expected = Queries.combine(FILTER, asList(restFilter, queryFilter)); assertEquals(expected.toString(), filter.toString()); } @@ -218,7 +230,7 @@ public void testTimestampOverriddenFilterFilter() { FROM test |EVAL {} = {} |WHERE {} > {} - """, AT_TIMESTAMP, OTHER_FIELD, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, OTHER_FIELD, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, nullValue()); @@ -230,7 +242,7 @@ public void testTimestampAsFunctionArgument() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE to_int(to_string({})) == {} - """, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, 
nullValue()); @@ -242,7 +254,7 @@ public void testTimestampAsFunctionArgumentInsideExpression() { var plan = plan(LoggerMessageFormat.format(null, """ FROM test |WHERE to_int(to_string({})) + 987 == {} - """, AT_TIMESTAMP, eqValue), null); + """, EMP_NO, eqValue), null); var filter = filterQueryForTransportNodes(plan); assertThat(filter, nullValue()); @@ -252,13 +264,14 @@ public void testTimestampAsFunctionArgumentInsideExpression() { * Ugly hack to create a QueryBuilder for SingleValueQuery. * For some reason however the queryName is set to null on range queries when deserializing. */ - public static QueryBuilder singleValueQuery(QueryBuilder inner, String field) { + public static QueryBuilder singleValueQuery(QueryBuilder inner, String field, Source source) { try (BytesStreamOutput out = new BytesStreamOutput()) { // emulate SingleValueQuery writeTo out.writeFloat(AbstractQueryBuilder.DEFAULT_BOOST); out.writeOptionalString(null); out.writeNamedWriteable(inner); out.writeString(field); + writeSource(out, source); StreamInput in = new NamedWriteableAwareStreamInput( ByteBufferStreamInput.wrap(BytesReference.toBytes(out.bytes())), @@ -292,7 +305,7 @@ private QueryBuilder restFilterQuery(String field) { } private QueryBuilder filterQueryForTransportNodes(PhysicalPlan plan) { - return PlannerUtils.detectFilter(plan, AT_TIMESTAMP); + return PlannerUtils.detectFilter(plan, EMP_NO); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java index 06c9febec324a..63b674aad7a90 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuerySerializationTests.java @@ -12,13 +12,14 @@ import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; public class SingleValueQuerySerializationTests extends AbstractWireSerializingTestCase { @Override protected SingleValueQuery.Builder createTestInstance() { - return new SingleValueQuery.Builder(randomQuery(), randomFieldName(), new SingleValueQuery.Stats()); + return new SingleValueQuery.Builder(randomQuery(), randomFieldName(), new SingleValueQuery.Stats(), Source.EMPTY); } private QueryBuilder randomQuery() { @@ -35,12 +36,14 @@ protected SingleValueQuery.Builder mutateInstance(SingleValueQuery.Builder insta case 0 -> new SingleValueQuery.Builder( randomValueOtherThan(instance.next(), this::randomQuery), instance.field(), - new SingleValueQuery.Stats() + new SingleValueQuery.Stats(), + Source.EMPTY ); case 1 -> new SingleValueQuery.Builder( instance.next(), randomValueOtherThan(instance.field(), this::randomFieldName), - new SingleValueQuery.Stats() + new SingleValueQuery.Stats(), + Source.EMPTY ); default -> throw new IllegalArgumentException(); }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index a6eacae2857e7..4322e5fbac2ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -74,44 +74,36 @@ public SingleValueQueryTests(Setup setup) { } public void testMatchAll() throws IOException { - testCase( - new SingleValueQuery(new MatchAll(Source.EMPTY), "foo").asBuilder(), - false, - false, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> 
l.size() == 1).count())) - ); + testCase(new SingleValueQuery(new MatchAll(Source.EMPTY), "foo").asBuilder(), false, false, this::runCase); } public void testMatchSome() throws IOException { int max = between(1, 100); testCase( - new SingleValueQuery.Builder(new RangeQueryBuilder("i").lt(max), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new RangeQueryBuilder("i").lt(max), "foo", new SingleValueQuery.Stats(), Source.EMPTY), false, false, - (fieldValues, count) -> { - int expected = 0; - for (int i = 0; i < max; i++) { - if (fieldValues.get(i).size() == 1) { - expected++; - } - } - assertThat(count, equalTo(expected)); - } + (fieldValues, count) -> runCase(fieldValues, count, null, max) ); } public void testSubPhrase() throws IOException { testCase( - new SingleValueQuery.Builder(new MatchPhraseQueryBuilder("str", "fox jumped"), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder( + new MatchPhraseQueryBuilder("str", "fox jumped"), + "foo", + new SingleValueQuery.Stats(), + Source.EMPTY + ), false, true, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> l.size() == 1).count())) + this::runCase ); } public void testMatchNone() throws IOException { testCase( - new SingleValueQuery.Builder(new MatchNoneQueryBuilder(), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new MatchNoneQueryBuilder(), "foo", new SingleValueQuery.Stats(), Source.EMPTY), true, false, (fieldValues, count) -> assertThat(count, equalTo(0)) @@ -120,7 +112,7 @@ public void testMatchNone() throws IOException { public void testRewritesToMatchNone() throws IOException { testCase( - new SingleValueQuery.Builder(new TermQueryBuilder("missing", 0), "foo", new SingleValueQuery.Stats()), + new SingleValueQuery.Builder(new TermQueryBuilder("missing", 0), "foo", new SingleValueQuery.Stats(), Source.EMPTY), true, false, (fieldValues, count) -> assertThat(count, equalTo(0)) @@ -141,7 +133,7 @@ public void 
testNotMatchNone() throws IOException { new SingleValueQuery(new MatchAll(Source.EMPTY).negate(Source.EMPTY), "foo").negate(Source.EMPTY).asBuilder(), false, false, - (fieldValues, count) -> assertThat(count, equalTo((int) fieldValues.stream().filter(l -> l.size() == 1).count())) + this::runCase ); } @@ -151,15 +143,7 @@ public void testNotMatchSome() throws IOException { new SingleValueQuery(new RangeQuery(Source.EMPTY, "i", null, false, max, false, null), "foo").negate(Source.EMPTY).asBuilder(), false, true, - (fieldValues, count) -> { - int expected = 0; - for (int i = max; i < 100; i++) { - if (fieldValues.get(i).size() == 1) { - expected++; - } - } - assertThat(count, equalTo(expected)); - } + (fieldValues, count) -> runCase(fieldValues, count, max, 100) ); } @@ -168,6 +152,30 @@ interface TestCase { void run(List> fieldValues, int count) throws IOException; } + private void runCase(List> fieldValues, int count, Integer docsStart, Integer docsStop) { + int expected = 0; + int min = docsStart != null ? docsStart : 0; + int max = docsStop != null ? docsStop : fieldValues.size(); + for (int i = min; i < max; i++) { + if (fieldValues.get(i).size() == 1) { + expected++; + } + } + assertThat(count, equalTo(expected)); + + // query's count runs against the full set, not just min-to-max + if (fieldValues.stream().anyMatch(x -> x.size() > 1)) { + assertWarnings( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value" + ); + } + } + + private void runCase(List> fieldValues, int count) { + runCase(fieldValues, count, null, null); + } + private void testCase(SingleValueQuery.Builder builder, boolean rewritesToMatchNone, boolean subHasTwoPhase, TestCase testCase) throws IOException { MapperService mapper = createMapperService(mapping(setup::mapping)); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java index ff4fbb7a9d9b0..cf71549925eda 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/tree/Source.java @@ -25,6 +25,7 @@ public Source(Location location, String text) { this.text = text; } + // TODO: rename to location() public Location source() { return location; } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java new file mode 100644 index 0000000000000..afba73373df92 --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SourceUtils.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.util; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.ql.QlIllegalArgumentException; +import org.elasticsearch.xpack.ql.tree.Location; +import org.elasticsearch.xpack.ql.tree.Source; + +import java.io.IOException; + +public final class SourceUtils { + + private SourceUtils() {} + + public static void writeSource(StreamOutput out, Source source) throws IOException { + writeSource(out, source, true); + } + + public static void writeSourceNoText(StreamOutput out, Source source) throws IOException { + writeSource(out, source, false); + } + + public static Source readSource(StreamInput in) throws IOException { + return readSource(in, null); + } + + public static Source readSourceWithText(StreamInput in, String queryText) throws IOException { + return readSource(in, queryText); + } + + private static void writeSource(StreamOutput out, Source source, boolean writeText) throws IOException { + out.writeInt(source.source().getLineNumber()); + out.writeInt(source.source().getColumnNumber()); + if (writeText) { + out.writeString(source.text()); + } else { + out.writeInt(source.text().length()); + } + } + + private static Source readSource(StreamInput in, @Nullable String queryText) throws IOException { + int line = in.readInt(); + int column = in.readInt(); + int charPositionInLine = column - 1; + + String text; + if (queryText == null) { + text = in.readString(); + } else { + int length = in.readInt(); + text = sourceText(queryText, line, column, length); + } + return new Source(new Location(line, charPositionInLine), text); + } + + private static String sourceText(String query, int line, int column, int length) { + if (line <= 0 || column <= 0 || query.isEmpty()) { + return StringUtils.EMPTY; + } + int offset = textOffset(query, line, column); + if (offset + length > query.length()) { + throw 
new QlIllegalArgumentException( + "location [@" + line + ":" + column + "] and length [" + length + "] overrun query size [" + query.length() + "]" + ); + } + return query.substring(offset, offset + length); + } + + private static int textOffset(String query, int line, int column) { + int offset = 0; + if (line > 1) { + String[] lines = query.split("\n"); + if (line > lines.length) { + throw new QlIllegalArgumentException( + "line location [" + line + "] higher than max [" + lines.length + "] in query [" + query + "]" + ); + } + for (int i = 0; i < line - 1; i++) { + offset += lines[i].length() + 1; // +1 accounts for the removed \n + } + } + offset += column - 1; // -1 since column is 1-based indexed + return offset; + } +} diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 240df6ecb0227..7ddd660645a7c 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -13,6 +13,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; public final class CsvSpecReader { @@ -84,8 +85,40 @@ public static class CsvTestCase { public String query; public String earlySchema; public String expectedResults; - public List expectedWarnings = new ArrayList<>(); + private final List expectedWarnings = new ArrayList<>(); public boolean ignoreOrder; + + // The emulated-specific warnings must always trail the non-emulated ones, if these are present. Otherwise, the closing bracket + // would need to be changed to a less common sequence (like `]#` maybe). + private static final String EMULATED_PREFIX = "#[emulated:"; + + /** + * Returns the warning headers expected to be added by the test. 
To declare such a header, use the `warning:definition` format + * in the CSV test declaration. The `definition` can use the `EMULATED_PREFIX` string to specify the format of the warning run on + * emulated physical operators, if this differs from the format returned by SingleValueQuery. + * @param forEmulated if true, the tests are run on emulated physical operators; if false, the test case is for queries executed + * on a "full stack" ESQL, having data loaded from Lucene. + * @return the list of headers that are expected to be returned part of the response. + */ + public List expectedWarnings(boolean forEmulated) { + List warnings = new ArrayList<>(expectedWarnings.size()); + for (String warning : expectedWarnings) { + int idx = warning.toLowerCase(Locale.ROOT).indexOf(EMULATED_PREFIX); + if (idx >= 0) { + assertTrue("Invalid warning spec: closing delimiter (]) missing: `" + warning + "`", warning.endsWith("]")); + if (forEmulated) { + if (idx + EMULATED_PREFIX.length() < warning.length() - 1) { + warnings.add(warning.substring(idx + EMULATED_PREFIX.length(), warning.length() - 1)); + } + } else if (idx > 0) { + warnings.add(warning.substring(0, idx)); + } // else: no warnings expected for non-emulated + } else { + warnings.add(warning); + } + } + return warnings; + } } } From 1a8d80321954cb325ef9a33ac58cb7eb919c2deb Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 12:10:35 +0100 Subject: [PATCH 212/263] [Connector API] Implement update name/description action (#102825) --- .../api/connector.update_name.json | 39 ++++ .../entsearch/336_connector_update_name.yml | 81 +++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 30 +-- .../connector/ConnectorIndexService.java | 71 ++++-- .../action/RestUpdateConnectorNameAction.java | 45 ++++ .../TransportUpdateConnectorNameAction.java | 55 +++++ .../action/UpdateConnectorNameAction.java | 209 ++++++++++++++++++ .../connector/ConnectorIndexServiceTests.java | 
46 ++++ ...rNameActionRequestBWCSerializingTests.java | 50 +++++ ...NameActionResponseBWCSerializingTests.java | 42 ++++ .../xpack/security/operator/Constants.java | 1 + 12 files changed, 639 insertions(+), 35 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json new file mode 100644 index 0000000000000..e42d9b5766b0a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -0,0 +1,39 @@ +{ + "connector.update_name": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Updates the name and/or description fields in the connector document." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/{connector_id}/_name", + "methods": [ + "PUT" + ], + "parts": { + "connector_id": { + "type": "string", + "description": "The unique identifier of the connector to be updated." + } + } + } + ] + }, + "body": { + "description": "An object containing the connector's name and/or description.", + "required": true + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml new file mode 100644 index 0000000000000..6fe025b4ae002 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/336_connector_update_name.yml @@ -0,0 +1,81 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + + - do: + connector.put: + connector_id: test-connector + body: + index_name: search-1-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + +--- +"Update Connector Name": + - do: + connector.update_name: + connector_id: test-connector + body: + name: test-name + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { name: test-name } + +--- +"Update Connector Name and Description": + - do: + connector.update_name: + connector_id: test-connector + body: + name: test-name + description: test-description + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { name: test-name } + - match: { description: test-description } + +--- +"Update Connector Scheduling - 404 when connector doesn't exist": + - do: + 
catch: "missing" + connector.update_name: + connector_id: test-non-existent-connector + body: + name: test-name + description: test-description + +--- +"Update Connector Scheduling - 400 status code when connector_id is empty": + - do: + catch: "bad_request" + connector.update_name: + connector_id: "" + body: + name: test-name + description: test-description + +--- +"Update Connector Scheduling - 400 status code when payload is not string": + - do: + catch: "bad_request" + connector.update_name: + connector_id: test-connector + body: + name: + field_1: test + field_2: something + description: test-description diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index ce9bbfa4d6a4b..12bd2f4a25bdd 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -55,6 +55,7 @@ import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; @@ -66,6 +67,7 @@ import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSeenAction; import 
org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; @@ -73,6 +75,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; @@ -221,6 +224,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(UpdateConnectorFilteringAction.INSTANCE, TransportUpdateConnectorFilteringAction.class), new ActionHandler<>(UpdateConnectorLastSeenAction.INSTANCE, TransportUpdateConnectorLastSeenAction.class), new ActionHandler<>(UpdateConnectorLastSyncStatsAction.INSTANCE, TransportUpdateConnectorLastSyncStatsAction.class), + new ActionHandler<>(UpdateConnectorNameAction.INSTANCE, TransportUpdateConnectorNameAction.class), new ActionHandler<>(UpdateConnectorPipelineAction.INSTANCE, TransportUpdateConnectorPipelineAction.class), new ActionHandler<>(UpdateConnectorSchedulingAction.INSTANCE, TransportUpdateConnectorSchedulingAction.class), @@ -295,6 +299,7 @@ public List getRestHandlers( new RestUpdateConnectorFilteringAction(), new 
RestUpdateConnectorLastSeenAction(), new RestUpdateConnectorLastSyncStatsAction(), + new RestUpdateConnectorNameAction(), new RestUpdateConnectorPipelineAction(), new RestUpdateConnectorSchedulingAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 73d066f64d197..8c0c150ea88af 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -199,7 +199,7 @@ public Connector(StreamInput in) throws IOException { static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); - static final ParseField DESCRIPTION_FIELD = new ParseField("description"); + public static final ParseField DESCRIPTION_FIELD = new ParseField("description"); public static final ParseField ERROR_FIELD = new ParseField("error"); static final ParseField FEATURES_FIELD = new ParseField("features"); public static final ParseField FILTERING_FIELD = new ParseField("filtering"); @@ -461,6 +461,10 @@ public String getApiKeyId() { return apiKeyId; } + public Map getConfiguration() { + return configuration; + } + public Map getCustomScheduling() { return customScheduling; } @@ -493,6 +497,14 @@ public String getLanguage() { return language; } + public Instant getLastSeen() { + return lastSeen; + } + + public ConnectorSyncInfo getSyncInfo() { + return syncInfo; + } + public String getName() { return name; } @@ -509,8 +521,8 @@ public String getServiceType() { return serviceType; } - public Map getConfiguration() { - return configuration; + public ConnectorStatus getStatus() { + return status; } 
public Object getSyncCursor() { @@ -521,18 +533,6 @@ public boolean isSyncNow() { return syncNow; } - public ConnectorSyncInfo getSyncInfo() { - return syncInfo; - } - - public Instant getLastSeen() { - return lastSeen; - } - - public ConnectorStatus getStatus() { - return status; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 624697edfcd85..4f4e9d234c813 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; @@ -214,12 +215,12 @@ public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Requ } /** - * Updates the {@link ConnectorFiltering} property of a {@link Connector}. + * Updates the error property of a {@link Connector}. * - * @param request Request for updating connector filtering property. - * @param listener Listener to respond to a successful response or an error. + * @param request The request for updating the connector's error. + * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request request, ActionListener listener) { + public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -244,12 +245,12 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ } /** - * Updates the lastSeen property of a {@link Connector}. + * Updates the name and/or description property of a {@link Connector}. * - * @param request The request for updating the connector's lastSeen status. + * @param request The request for updating the connector's name and/or description. * @param listener The listener for handling responses, including successful updates or errors. */ - public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + public void updateConnectorNameOrDescription(UpdateConnectorNameAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -274,12 +275,12 @@ public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request reques } /** - * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * Updates the {@link ConnectorFiltering} property of a {@link Connector}. * - * @param request Request for updating connector last sync stats properties. + * @param request Request for updating connector filtering property. * @param listener Listener to respond to a successful response or an error. 
*/ - public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { + public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -304,12 +305,42 @@ public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Requ } /** - * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. + * Updates the lastSeen property of a {@link Connector}. * - * @param request Request for updating connector ingest pipeline property. + * @param request The request for updating the connector's lastSeen status. + * @param listener The listener for handling responses, including successful updates or errors. + */ + public void updateConnectorLastSeen(UpdateConnectorLastSeenAction.Request request, ActionListener listener) { + try { + String connectorId = request.getConnectorId(); + final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( + new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .id(connectorId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + ); + clientWithOrigin.update( + updateRequest, + new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { + if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { + l.onFailure(new ResourceNotFoundException(connectorId)); + return; + } + l.onResponse(updateResponse); + }) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Updates the {@link ConnectorSyncInfo} properties in a {@link Connector}. + * + * @param request Request for updating connector last sync stats properties. 
* @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { + public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -334,12 +365,12 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques } /** - * Updates the {@link ConnectorScheduling} property of a {@link Connector}. + * Updates the {@link ConnectorIngestPipeline} property of a {@link Connector}. * - * @param request The request for updating the connector's scheduling. - * @param listener The listener for handling responses, including successful updates or errors. + * @param request Request for updating connector ingest pipeline property. + * @param listener Listener to respond to a successful response or an error. */ - public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { + public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( @@ -364,12 +395,12 @@ public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re } /** - * Updates the error property of a {@link Connector}. + * Updates the {@link ConnectorScheduling} property of a {@link Connector}. * - * @param request The request for updating the connector's error. + * @param request The request for updating the connector's scheduling. * @param listener The listener for handling responses, including successful updates or errors. 
*/ - public void updateConnectorError(UpdateConnectorErrorAction.Request request, ActionListener listener) { + public void updateConnectorScheduling(UpdateConnectorSchedulingAction.Request request, ActionListener listener) { try { String connectorId = request.getConnectorId(); final UpdateRequest updateRequest = new UpdateRequest(CONNECTOR_INDEX_NAME, connectorId).doc( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java new file mode 100644 index 0000000000000..54ce2c9af79e8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestUpdateConnectorNameAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_update_name_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT + "/{connector_id}/_name")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + UpdateConnectorNameAction.Request request = UpdateConnectorNameAction.Request.fromXContentBytes( + restRequest.param("connector_id"), + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + UpdateConnectorNameAction.INSTANCE, + request, + new RestToXContentListener<>(channel, UpdateConnectorNameAction.Response::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java new file mode 100644 index 0000000000000..252734aab1c51 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +public class TransportUpdateConnectorNameAction extends HandledTransportAction< + UpdateConnectorNameAction.Request, + UpdateConnectorNameAction.Response> { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportUpdateConnectorNameAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + UpdateConnectorNameAction.NAME, + transportService, + actionFilters, + UpdateConnectorNameAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute( + Task task, + UpdateConnectorNameAction.Request request, + ActionListener listener + ) { + connectorIndexService.updateConnectorNameOrDescription( + request, + listener.map(r -> new UpdateConnectorNameAction.Response(r.getResult())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java new file mode 100644 index 0000000000000..1db9bbe3aad9d 
--- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java @@ -0,0 +1,209 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class UpdateConnectorNameAction extends ActionType { + + public static final UpdateConnectorNameAction INSTANCE = new 
UpdateConnectorNameAction(); + public static final String NAME = "cluster:admin/xpack/connector/update_name"; + + public UpdateConnectorNameAction() { + super(NAME, UpdateConnectorNameAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + private final String connectorId; + + @Nullable + private final String name; + + @Nullable + private final String description; + + public Request(String connectorId, String name, String description) { + this.connectorId = connectorId; + this.name = name; + this.description = description; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.connectorId = in.readString(); + this.name = in.readOptionalString(); + this.description = in.readOptionalString(); + } + + public String getConnectorId() { + return connectorId; + } + + public String getName() { + return name; + } + + @Override + public String getDescription() { + return description; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (Strings.isNullOrEmpty(connectorId)) { + validationException = addValidationError("[connector_id] cannot be null or empty.", validationException); + } + if (Strings.isNullOrEmpty(name)) { + validationException = addValidationError("[name] cannot be null or empty.", validationException); + } + + return validationException; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_update_name_request", + false, + ((args, connectorId) -> new UpdateConnectorNameAction.Request(connectorId, (String) args[0], (String) args[1])) + ); + + static { + PARSER.declareStringOrNull(constructorArg(), Connector.NAME_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), Connector.DESCRIPTION_FIELD); + } + + public static UpdateConnectorNameAction.Request fromXContentBytes( + String connectorId, + BytesReference source, + 
XContentType xContentType + ) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return UpdateConnectorNameAction.Request.fromXContent(parser, connectorId); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static UpdateConnectorNameAction.Request fromXContent(XContentParser parser, String connectorId) throws IOException { + return PARSER.parse(parser, connectorId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (name != null) { + builder.field(Connector.NAME_FIELD.getPreferredName(), name); + } + if (description != null) { + builder.field(Connector.DESCRIPTION_FIELD.getPreferredName(), description); + } + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(connectorId); + out.writeOptionalString(name); + out.writeOptionalString(description); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(connectorId, request.connectorId) + && Objects.equals(name, request.name) + && Objects.equals(description, request.description); + } + + @Override + public int hashCode() { + return Objects.hash(connectorId, name, description); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public Response(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + public Response(DocWriteResponse.Result result) { + this.result = result; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + 
} + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index ffa532012d982..1960d14faeda2 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSeenAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorLastSyncStatsAction; +import org.elasticsearch.xpack.application.connector.action.UpdateConnectorNameAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorPipelineAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorSchedulingAction; import org.junit.Before; @@ -215,6 +216,25 @@ public void testUpdateConnectorError() throws Exception { assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); } + public void 
testUpdateConnectorNameOrDescription() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + DocWriteResponse resp = awaitPutConnector(connector); + assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); + + UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( + connector.getConnectorId(), + randomAlphaOfLengthBetween(5, 15), + randomAlphaOfLengthBetween(5, 15) + ); + + DocWriteResponse updateResponse = awaitUpdateConnectorName(updateNameDescriptionRequest); + assertThat(updateResponse.status(), equalTo(RestStatus.OK)); + + Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + assertThat(updateNameDescriptionRequest.getName(), equalTo(indexedConnector.getName())); + assertThat(updateNameDescriptionRequest.getDescription(), equalTo(indexedConnector.getDescription())); + } + private DeleteResponse awaitDeleteConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -469,6 +489,31 @@ public void onFailure(Exception e) { return resp.get(); } + private UpdateResponse awaitUpdateConnectorName(UpdateConnectorNameAction.Request updatedNameOrDescription) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.updateConnectorNameOrDescription(updatedNameOrDescription, new ActionListener<>() { + @Override + public void onResponse(UpdateResponse indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for update name request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + 
assertNotNull("Received null response from update name request", resp.get()); + return resp.get(); + } + private UpdateResponse awaitUpdateConnectorError(UpdateConnectorErrorAction.Request updatedError) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); @@ -493,4 +538,5 @@ public void onFailure(Exception e) { assertNotNull("Received null response from update error request", resp.get()); return resp.get(); } + } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..7ee377a7933bf --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionRequestBWCSerializingTests.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorNameActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + UpdateConnectorNameAction.Request> { + + private String connectorId; + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorNameAction.Request::new; + } + + @Override + protected UpdateConnectorNameAction.Request createTestInstance() { + this.connectorId = randomUUID(); + return new UpdateConnectorNameAction.Request(connectorId, randomAlphaOfLengthBetween(5, 15), randomAlphaOfLengthBetween(5, 15)); + } + + @Override + protected UpdateConnectorNameAction.Request mutateInstance(UpdateConnectorNameAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorNameAction.Request doParseInstance(XContentParser parser) throws IOException { + return UpdateConnectorNameAction.Request.fromXContent(parser, this.connectorId); + } + + @Override + protected UpdateConnectorNameAction.Request mutateInstanceForVersion( + UpdateConnectorNameAction.Request instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..2297ccb565b5e --- /dev/null +++ 
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class UpdateConnectorNameActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + UpdateConnectorNameAction.Response> { + + @Override + protected Writeable.Reader instanceReader() { + return UpdateConnectorNameAction.Response::new; + } + + @Override + protected UpdateConnectorNameAction.Response createTestInstance() { + return new UpdateConnectorNameAction.Response(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected UpdateConnectorNameAction.Response mutateInstance(UpdateConnectorNameAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected UpdateConnectorNameAction.Response mutateInstanceForVersion( + UpdateConnectorNameAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 5c4fd44d77c9b..a432f28f71e54 100644 --- 
a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -132,6 +132,7 @@ public class Constants { "cluster:admin/xpack/connector/update_filtering", "cluster:admin/xpack/connector/update_last_seen", "cluster:admin/xpack/connector/update_last_sync_stats", + "cluster:admin/xpack/connector/update_name", "cluster:admin/xpack/connector/update_pipeline", "cluster:admin/xpack/connector/update_scheduling", "cluster:admin/xpack/connector/sync_job/cancel", From 96fcf04a121041ae2e69d44c4222e4a84354e60f Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 11:46:49 +0000 Subject: [PATCH 213/263] AwaitsFix for #102974 --- .../search/aggregations/bucket/nested/NestedAggregatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index fd848895e25f6..b71e10c34eef8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -502,6 +502,7 @@ public void testNestedOrdering() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testNestedOrdering_random() throws IOException { int numBooks = randomIntBetween(32, 512); List> books = new ArrayList<>(); From 2502af81980c8156f07f230d12549c7fcb3df7e5 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 5 Dec 2023 12:52:04 +0100 Subject: [PATCH 214/263] x-pack:apm-data module should mention it is for APM Server (#102866) apm-data module should be more explicit that it is for the apm 
server usage. It is confusing when starting up ES to see an 'APM is disabled' log line, especially since we also have :modules:apm, which is meant for sending APM metrics and traces. This commit rephrases the log messages and renames the APMPlugin class to mention APM Server. --- .../src/main/java/org/elasticsearch/telemetry/apm/APM.java | 5 +++++ x-pack/plugin/apm-data/README.md | 6 ++++-- .../xpack/apmdata/APMIndexTemplateRegistry.java | 4 ++-- .../java/org/elasticsearch/xpack/apmdata/APMPlugin.java | 2 +- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java index bd751f95b2eef..979815f497583 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APM.java @@ -8,6 +8,8 @@ package org.elasticsearch.telemetry.apm; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -44,6 +46,7 @@ * and applies the new settings values, provided those settings can be dynamically updated. */ public class APM extends Plugin implements NetworkPlugin, TelemetryPlugin { + private static final Logger logger = LogManager.getLogger(APM.class); private final SetOnce telemetryProvider = new SetOnce<>(); private final Settings settings; @@ -69,6 +72,8 @@ public Collection createComponents(PluginServices services) { apmAgentSettings.syncAgentSystemProperties(settings); final APMMeterService apmMeter = new APMMeterService(settings); apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); + logger.info("Sending apm metrics is {}", APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings) ? 
"enabled" : "disabled"); + logger.info("Sending apm traces is {}", APMAgentSettings.APM_ENABLED_SETTING.get(settings) ? "enabled" : "disabled"); return List.of(apmTracer, apmMeter); } diff --git a/x-pack/plugin/apm-data/README.md b/x-pack/plugin/apm-data/README.md index c4a0d97cb09c0..10892d767b536 100644 --- a/x-pack/plugin/apm-data/README.md +++ b/x-pack/plugin/apm-data/README.md @@ -1,6 +1,8 @@ -## APM Data plugin +NOTE: this plugin is not related to APM Metrics used in ES codebase. The APM Metrics are in :modules:apm -The APM data plugin installs index templates, component templates, and ingest pipelines for Elastic APM. +## APM Ingest plugin + +The APM Ingest plugin installs index templates, component templates, and ingest pipelines for Elastic APM Server. All resources are defined as YAML under [src/main/resources](src/main/resources). diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 665ecc16a1e14..44621ee211838 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -136,7 +136,7 @@ private static ComponentTemplate loadComponentTemplate(String name, int version) final byte[] content = loadVersionedResourceUTF8("/component-templates/" + name + ".yaml", version); return ComponentTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); } catch (Exception e) { - throw new RuntimeException("failed to load APM component template: " + name, e); + throw new RuntimeException("failed to load APM Ingest plugin's component template: " + name, e); } } @@ -145,7 +145,7 @@ private static ComposableIndexTemplate loadIndexTemplate(String name, int versio final byte[] content = 
loadVersionedResourceUTF8("/index-templates/" + name + ".yaml", version); return ComposableIndexTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); } catch (Exception e) { - throw new RuntimeException("failed to load APM index template: " + name, e); + throw new RuntimeException("failed to load APM Ingest plugin's index template: " + name, e); } } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index f3bf0a4eb800e..7acf3a3c972da 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -33,7 +33,7 @@ public Collection createComponents(PluginServices services) { ) ); APMIndexTemplateRegistry registryInstance = registry.get(); - logger.info("APM is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); + logger.info("APM ingest plugin is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); registryInstance.initialize(); return List.of(registryInstance); } From df097aae01d47655086ff7f854832590d68ba207 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 12:53:21 +0100 Subject: [PATCH 215/263] [DOCS] Change ES|QL getting started guide for updated dataset (#102970) --- docs/reference/esql/esql-get-started.asciidoc | 4 ++-- .../esql-getting-started-sample-data.asciidoc | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 4109d9d6f4ba3..b0b68d281809e 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -158,8 +158,8 @@ is different than this example, where the sorting comes before the limit. 
Use the <> command to append columns to a table, with calculated values. For example, the following query appends a `duration_ms` column. The -values in the column are computed by dividing `event.duration` by 1,000,000. In -other words: `event.duration` converted from nanoseconds to milliseconds. +values in the column are computed by dividing `event_duration` by 1,000,000. In +other words: `event_duration` converted from nanoseconds to milliseconds. [source,esql] ---- diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc index 434954d8d400a..2a899a9f1ea33 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc @@ -1,7 +1,7 @@ // tag::own-deployment[] First ingest some sample data. In {kib}, open the main menu and select *Dev -Tools*. Run the the following two requests: +Tools*. 
Run the following two requests: [source,console] ---- @@ -9,7 +9,7 @@ PUT sample_data { "mappings": { "properties": { - "client.ip": { + "client_ip": { "type": "ip" }, "message": { @@ -21,19 +21,19 @@ PUT sample_data PUT sample_data/_bulk {"index": {}} -{"@timestamp": "2023-10-23T12:15:03.360Z", "client.ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event.duration": 3450233} +{"@timestamp": "2023-10-23T12:15:03.360Z", "client_ip": "172.21.2.162", "message": "Connected to 10.1.0.3", "event_duration": 3450233} {"index": {}} -{"@timestamp": "2023-10-23T12:27:28.948Z", "client.ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event.duration": 2764889} +{"@timestamp": "2023-10-23T12:27:28.948Z", "client_ip": "172.21.2.113", "message": "Connected to 10.1.0.2", "event_duration": 2764889} {"index": {}} -{"@timestamp": "2023-10-23T13:33:34.937Z", "client.ip": "172.21.0.5", "message": "Disconnected", "event.duration": 1232382} +{"@timestamp": "2023-10-23T13:33:34.937Z", "client_ip": "172.21.0.5", "message": "Disconnected", "event_duration": 1232382} {"index": {}} -{"@timestamp": "2023-10-23T13:51:54.732Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 725448} +{"@timestamp": "2023-10-23T13:51:54.732Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 725448} {"index": {}} -{"@timestamp": "2023-10-23T13:52:55.015Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 8268153} +{"@timestamp": "2023-10-23T13:52:55.015Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 8268153} {"index": {}} -{"@timestamp": "2023-10-23T13:53:55.832Z", "client.ip": "172.21.3.15", "message": "Connection error", "event.duration": 5033755} +{"@timestamp": "2023-10-23T13:53:55.832Z", "client_ip": "172.21.3.15", "message": "Connection error", "event_duration": 5033755} {"index": {}} -{"@timestamp": "2023-10-23T13:55:01.543Z", "client.ip": "172.21.3.15", "message": 
"Connected to 10.1.0.1", "event.duration": 1756467} +{"@timestamp": "2023-10-23T13:55:01.543Z", "client_ip": "172.21.3.15", "message": "Connected to 10.1.0.1", "event_duration": 1756467} ---- // end::own-deployment[] From ea6013ca9c1fcf6fa6ba76d988ce4150cb381377 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 07:33:32 -0500 Subject: [PATCH 216/263] Test mute for issue #102974 (#102975) related https://github.com/elastic/elasticsearch/issues/102974 --- .../search/aggregations/bucket/nested/NestedAggregatorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index b71e10c34eef8..83a2e856a512e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -563,6 +563,7 @@ public void testNestedOrdering_random() throws IOException { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testPreGetChildLeafCollectors() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { From 8d4677e011c1ce68d97704da92cdca1e67b10f78 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 5 Dec 2023 14:16:35 +0100 Subject: [PATCH 217/263] Add AutoscalingMissedIndicesUpdateException (#102817) Add an exception for the case where we can't update autoscaling index metric stats due to a missed index. 
--- .../elasticsearch/ElasticsearchException.java | 7 ++++++ .../org/elasticsearch/TransportVersions.java | 1 + ...toscalingMissedIndicesUpdateException.java | 24 +++++++++++++++++++ .../ExceptionSerializationTests.java | 2 ++ 4 files changed, 34 insertions(+) create mode 100644 server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 5c5133e478ee1..50a5f7420847b 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.AutoscalingMissedIndicesUpdateException; import org.elasticsearch.indices.recovery.RecoveryCommitTooNewException; import org.elasticsearch.rest.ApiNotAvailableException; import org.elasticsearch.rest.RestStatus; @@ -1863,6 +1864,12 @@ private enum ElasticsearchExceptionHandle { AggregationExecutionException.InvalidPath::new, 174, TransportVersions.INVALID_BUCKET_PATH_EXCEPTION_INTRODUCED + ), + MISSED_INDICES_UPDATE_EXCEPTION( + AutoscalingMissedIndicesUpdateException.class, + AutoscalingMissedIndicesUpdateException::new, + 175, + TransportVersions.MISSED_INDICES_UPDATE_EXCEPTION_ADDED ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 30769371f3608..369e1da237aa0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -188,6 +188,7 @@ static TransportVersion def(int id) { public static final TransportVersion UPGRADE_TO_LUCENE_9_9 = def(8_555_00_0); public static 
final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); + public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java b/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java new file mode 100644 index 0000000000000..e32af622b5531 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/indices/AutoscalingMissedIndicesUpdateException.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.indices; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + +public class AutoscalingMissedIndicesUpdateException extends ElasticsearchException { + + public AutoscalingMissedIndicesUpdateException(String message) { + super(message); + } + + public AutoscalingMissedIndicesUpdateException(StreamInput in) throws IOException { + super(in); + } +} diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index f7362c7001c36..3e0d9193ffed9 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotInPrimaryModeException; +import org.elasticsearch.indices.AutoscalingMissedIndicesUpdateException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.recovery.PeerRecoveryNotFound; @@ -825,6 +826,7 @@ public void testIds() { ids.put(172, RecoveryCommitTooNewException.class); ids.put(173, TooManyScrollContextsException.class); ids.put(174, AggregationExecutionException.InvalidPath.class); + ids.put(175, AutoscalingMissedIndicesUpdateException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { From 2703b802c8bba13eddaa4a01ac2ace3d55458863 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 5 Dec 2023 14:29:21 +0100 Subject: [PATCH 218/263] Add cluster def link to CONTRIBUTING.md (#102979) --- CONTRIBUTING.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index db8cca17a5606..5b68fd807220e 100644 
--- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -138,6 +138,16 @@ and then run `curl` in another window like this: curl -u elastic:password localhost:9200 +To send requests to this Elasticsearch instance, either use the built-in `elastic` +user and password as above or use the pre-configured `elastic-admin` user: + + curl -u elastic-admin:elastic-password localhost:9200 + +Security can also be disabled altogether: + + ./gradlew :run -Dtests.es.xpack.security.enabled=false + +The definition of this Elasticsearch cluster can be found [here](build-tools-internal/src/main/groovy/elasticsearch.run.gradle). ### Importing the project into IntelliJ IDEA From 3ace42266821f9baf1823ba4f5fe1a0f52935149 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 5 Dec 2023 14:37:52 +0100 Subject: [PATCH 219/263] [DOCS] More ES|QL getting started updates (#102980) * [DOCS] More ES|QL getting started updates * Change 'server.ip' into 'server_ip' --- docs/reference/esql/esql-get-started.asciidoc | 6 +++--- .../esql-getting-started-enrich-policy.asciidoc | 14 +++++++------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index b0b68d281809e..6e467e1e7312d 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -298,10 +298,10 @@ string, you can use the following `DISSECT` command: include::{esql-specs}/dissect.csv-spec[tag=gs-dissect] ---- -This adds a `server.ip` column to those rows that have a `message` that matches -this pattern. For other rows, the value of `server.ip` is `null`. +This adds a `server_ip` column to those rows that have a `message` that matches +this pattern. For other rows, the value of `server_ip` is `null`. -You can use the new `server.ip` column that's added by the `DISSECT` command in +You can use the new `server_ip` column that's added by the `DISSECT` command in subsequent commands. 
For example, to determine how many connections each server has accepted: diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc index a1898dffda684..c51a46bdef3b3 100644 --- a/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc +++ b/docs/reference/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc @@ -10,7 +10,7 @@ PUT clientips { "mappings": { "properties": { - "client.ip": { + "client_ip": { "type": "keyword" }, "env": { @@ -22,21 +22,21 @@ PUT clientips PUT clientips/_bulk { "index" : {}} -{ "client.ip": "172.21.0.5", "env": "Development" } +{ "client_ip": "172.21.0.5", "env": "Development" } { "index" : {}} -{ "client.ip": "172.21.2.113", "env": "QA" } +{ "client_ip": "172.21.2.113", "env": "QA" } { "index" : {}} -{ "client.ip": "172.21.2.162", "env": "QA" } +{ "client_ip": "172.21.2.162", "env": "QA" } { "index" : {}} -{ "client.ip": "172.21.3.15", "env": "Production" } +{ "client_ip": "172.21.3.15", "env": "Production" } { "index" : {}} -{ "client.ip": "172.21.3.16", "env": "Production" } +{ "client_ip": "172.21.3.16", "env": "Production" } PUT /_enrich/policy/clientip_policy { "match": { "indices": "clientips", - "match_field": "client.ip", + "match_field": "client_ip", "enrich_fields": ["env"] } } From 903193c97af955e263ecd5a898286d1fa416643b Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 5 Dec 2023 15:08:56 +0100 Subject: [PATCH 220/263] ES|QL: Better management of allowed errors in generative tests (#102969) --- .../qa/rest/generative/GenerativeRestTest.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 
9ba54ea1941fd..c341ad26cb7a6 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; @@ -29,10 +30,15 @@ public abstract class GenerativeRestTest extends ESRestTestCase { public static final int MAX_DEPTH = 10; public static final Set ALLOWED_ERRORS = Set.of( - "is ambiguous (to disambiguate use quotes or qualifiers)", - "due to ambiguities being mapped as" + "Reference \\[.*\\] is ambiguous", + "Cannot use field \\[.*\\] due to ambiguities" ); + public static final Set ALLOWED_ERROR_PATTERNS = ALLOWED_ERRORS.stream() + .map(x -> ".*" + x + ".*") + .map(x -> Pattern.compile(x, Pattern.DOTALL)) + .collect(Collectors.toSet()); + @Before public void setup() throws IOException { if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { @@ -65,8 +71,8 @@ public void test() { } private void checkException(EsqlQueryGenerator.QueryExecuted query) { - for (String allowedError : ALLOWED_ERRORS) { - if (query.exception().getMessage().contains(allowedError)) { + for (Pattern allowedError : ALLOWED_ERROR_PATTERNS) { + if (allowedError.matcher(query.exception().getMessage()).matches()) { return; } } From 0c946ff64d16fca265668c34fd21ba4670b0ba18 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 15:13:18 +0100 Subject: [PATCH 221/263] [Connectors API] Post connector endpoint (#102959) --- .../rest-api-spec/api/connector.post.json | 33 +++ .../elastic-connectors-mappings.json | 3 - .../test/entsearch/300_connector_put.yml | 6 +- .../test/entsearch/305_connector_post.yml | 78 ++++++ .../test/entsearch/310_connector_list.yml | 20 +- 
.../xpack/application/EnterpriseSearch.java | 5 + .../application/connector/Connector.java | 100 ++++---- .../connector/ConnectorIndexService.java | 43 +++- .../connector/action/GetConnectorAction.java | 8 +- .../connector/action/PostConnectorAction.java | 242 ++++++++++++++++++ .../action/RestPostConnectorAction.java | 45 ++++ .../action/TransportPostConnectorAction.java | 60 +++++ .../action/TransportPutConnectorAction.java | 9 +- .../syncjob/ConnectorSyncJobIndexService.java | 4 +- .../connector/ConnectorIndexServiceTests.java | 115 ++++++--- .../connector/ConnectorTestUtils.java | 16 +- .../application/connector/ConnectorTests.java | 6 +- ...ctorActionResponseBWCSerializingTests.java | 2 +- ...ectorActionRequestBWCSerializingTests.java | 44 ++++ ...ctorActionResponseBWCSerializingTests.java | 36 +++ .../syncjob/ConnectorSyncJobTests.java | 4 +- .../xpack/security/operator/Constants.java | 1 + 22 files changed, 757 insertions(+), 123 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json new file mode 100644 index 0000000000000..2dfaf150c455a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -0,0 +1,33 @@ +{ + "connector.post": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Creates a connector." + }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ], + "content_type": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector", + "methods": [ + "POST" + ] + } + ] + }, + "body": { + "description": "The connector configuration.", + "required": false + } + } +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json index 2a41662a136a7..651e1c84da73a 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/entsearch/connector/elastic-connectors-mappings.json @@ -18,9 +18,6 @@ "api_key_id": { "type": "keyword" }, - "connector_id": { - "type": "keyword" - }, "configuration": { "type": "object" }, diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml index 464b64a2b24a3..77d4f28721525 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/300_connector_put.yml @@ -22,7 +22,7 @@ setup: connector.get: connector_id: test-connector - - match: { connector_id: test-connector } + - match: { id: test-connector } - match: { index_name: search-test } - match: { name: my-connector } - match: { language: pl } @@ -43,7 +43,7 @@ setup: connector.get: connector_id: test-connector-with-defaults - - match: { connector_id: test-connector-with-defaults } + - match: { id: test-connector-with-defaults } - match: { index_name: search-test } - match: { is_native: false } - match: { sync_now: false } @@ -67,7 +67,7 @@ setup: connector.get: connector_id: test-connector-native - - match: { connector_id: test-connector-native } + - match: { id: test-connector-native } - match: { index_name: search-test } - match: { is_native: true } - match: { sync_now: false } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml new file mode 100644 index 0000000000000..8d0fa14311608 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/305_connector_post.yml @@ -0,0 +1,78 @@ + +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + +--- +'Create Connector': + - do: + connector.post: + body: + index_name: search-test + name: my-connector + language: pl + is_native: false + service_type: super-connector + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { name: my-connector } + - match: { language: pl } + - match: { is_native: false } + - match: { service_type: super-connector } + +--- +'Create Connector - Default values are initialized correctly': + - do: + connector.post: + body: + 
index_name: search-test + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { is_native: false } + - match: { sync_now: false } + - match: { status: created } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } + +--- +'Create Connector - Native connector is initialized correctly': + - do: + connector.post: + body: + index_name: search-test + is_native: true + + - set: { id: id } + - match: { id: $id } + + - do: + connector.get: + connector_id: $id + + - match: { id: $id } + - match: { index_name: search-test } + - match: { is_native: true } + - match: { sync_now: false } + - match: { status: needs_configuration } + - match: { configuration: {} } + - match: { custom_scheduling: {} } + - match: { filtering.0.domain: DEFAULT } + diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml index 36cd1c283f7e8..52cfcdee0bb85 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml @@ -39,16 +39,16 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-a" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-a" } - match: { results.0.index_name: "search-1-test" } - match: { results.0.language: "pl" } - - match: { results.1.connector_id: "connector-b" } + - match: { results.1.id: "connector-b" } - match: { results.1.index_name: "search-2-test" } - match: { results.1.language: "en" } - - match: { results.2.connector_id: "connector-c" 
} + - match: { results.2.id: "connector-c" } - match: { results.2.index_name: "search-3-test" } - match: { results.2.language: "nl" } @@ -61,12 +61,12 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-b" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-b" } - match: { results.0.index_name: "search-2-test" } - match: { results.0.language: "en" } - - match: { results.1.connector_id: "connector-c" } + - match: { results.1.id: "connector-c" } - match: { results.1.index_name: "search-3-test" } - match: { results.1.language: "nl" } @@ -78,12 +78,12 @@ setup: - match: { count: 3 } - # Alphabetical order by connector_id for results - - match: { results.0.connector_id: "connector-a" } + # Alphabetical order by index_name for results + - match: { results.0.id: "connector-a" } - match: { results.0.index_name: "search-1-test" } - match: { results.0.language: "pl" } - - match: { results.1.connector_id: "connector-b" } + - match: { results.1.id: "connector-b" } - match: { results.1.index_name: "search-2-test" } - match: { results.1.language: "en" } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 12bd2f4a25bdd..2119d9d0a4c30 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -45,10 +45,12 @@ import org.elasticsearch.xpack.application.connector.action.DeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.GetConnectorAction; import org.elasticsearch.xpack.application.connector.action.ListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import 
org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.RestPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.RestUpdateConnectorErrorAction; @@ -61,6 +63,7 @@ import org.elasticsearch.xpack.application.connector.action.TransportDeleteConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportGetConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportListConnectorAction; +import org.elasticsearch.xpack.application.connector.action.TransportPostConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportPutConnectorAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.TransportUpdateConnectorErrorAction; @@ -218,6 +221,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorAction.INSTANCE, TransportDeleteConnectorAction.class), new ActionHandler<>(GetConnectorAction.INSTANCE, TransportGetConnectorAction.class), new ActionHandler<>(ListConnectorAction.INSTANCE, TransportListConnectorAction.class), + new ActionHandler<>(PostConnectorAction.INSTANCE, TransportPostConnectorAction.class), new ActionHandler<>(PutConnectorAction.INSTANCE, TransportPutConnectorAction.class), new ActionHandler<>(UpdateConnectorConfigurationAction.INSTANCE, TransportUpdateConnectorConfigurationAction.class), new 
ActionHandler<>(UpdateConnectorErrorAction.INSTANCE, TransportUpdateConnectorErrorAction.class), @@ -293,6 +297,7 @@ public List getRestHandlers( new RestDeleteConnectorAction(), new RestGetConnectorAction(), new RestListConnectorAction(), + new RestPostConnectorAction(), new RestPutConnectorAction(), new RestUpdateConnectorConfigurationAction(), new RestUpdateConnectorErrorAction(), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index 8c0c150ea88af..bcb182774e758 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -32,7 +32,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; /** @@ -107,7 +106,7 @@ public class Connector implements NamedWriteable, ToXContentObject { /** * Constructor for Connector. * - * @param connectorId Unique identifier for the connector. + * @param connectorId Unique identifier for the connector. Used when building get/list response. Equals to doc _id. * @param apiKeyId API key ID used for authentication/authorization against ES. * @param configuration Configuration settings for the connector. * @param customScheduling Custom scheduling settings for the connector. 
@@ -150,7 +149,7 @@ private Connector( Object syncCursor, boolean syncNow ) { - this.connectorId = Objects.requireNonNull(connectorId, "connectorId cannot be null"); + this.connectorId = connectorId; this.apiKeyId = apiKeyId; this.configuration = configuration; this.customScheduling = customScheduling; @@ -173,7 +172,7 @@ private Connector( } public Connector(StreamInput in) throws IOException { - this.connectorId = in.readString(); + this.connectorId = in.readOptionalString(); this.apiKeyId = in.readOptionalString(); this.configuration = in.readMap(ConnectorConfiguration::new); this.customScheduling = in.readMap(ConnectorCustomSchedule::new); @@ -195,7 +194,7 @@ public Connector(StreamInput in) throws IOException { this.syncNow = in.readBoolean(); } - public static final ParseField ID_FIELD = new ParseField("connector_id"); + public static final ParseField ID_FIELD = new ParseField("id"); static final ParseField API_KEY_ID_FIELD = new ParseField("api_key_id"); public static final ParseField CONFIGURATION_FIELD = new ParseField("configuration"); static final ParseField CUSTOM_SCHEDULING_FIELD = new ParseField("custom_scheduling"); @@ -216,45 +215,48 @@ public Connector(StreamInput in) throws IOException { static final ParseField SYNC_NOW_FIELD = new ParseField("sync_now"); @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("connector", true, (args) -> { - int i = 0; - return new Builder().setConnectorId((String) args[i++]) - .setApiKeyId((String) args[i++]) - .setConfiguration((Map) args[i++]) - .setCustomScheduling((Map) args[i++]) - .setDescription((String) args[i++]) - .setError((String) args[i++]) - .setFeatures((ConnectorFeatures) args[i++]) - .setFiltering((List) args[i++]) - .setIndexName((String) args[i++]) - .setIsNative((Boolean) args[i++]) - .setLanguage((String) args[i++]) - .setLastSeen((Instant) args[i++]) - .setSyncInfo( - new 
ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) - .setLastAccessControlSyncScheduledAt((Instant) args[i++]) - .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastDeletedDocumentCount((Long) args[i++]) - .setLastIncrementalSyncScheduledAt((Instant) args[i++]) - .setLastIndexedDocumentCount((Long) args[i++]) - .setLastSyncError((String) args[i++]) - .setLastSyncScheduledAt((Instant) args[i++]) - .setLastSyncStatus((ConnectorSyncStatus) args[i++]) - .setLastSynced((Instant) args[i++]) - .build() - ) - .setName((String) args[i++]) - .setPipeline((ConnectorIngestPipeline) args[i++]) - .setScheduling((ConnectorScheduling) args[i++]) - .setServiceType((String) args[i++]) - .setStatus((ConnectorStatus) args[i++]) - .setSyncCursor(args[i++]) - .setSyncNow((Boolean) args[i]) - .build(); - }); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector", + true, + (args, docId) -> { + int i = 0; + return new Builder().setConnectorId(docId) + .setApiKeyId((String) args[i++]) + .setConfiguration((Map) args[i++]) + .setCustomScheduling((Map) args[i++]) + .setDescription((String) args[i++]) + .setError((String) args[i++]) + .setFeatures((ConnectorFeatures) args[i++]) + .setFiltering((List) args[i++]) + .setIndexName((String) args[i++]) + .setIsNative((Boolean) args[i++]) + .setLanguage((String) args[i++]) + .setLastSeen((Instant) args[i++]) + .setSyncInfo( + new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++]) + .setLastAccessControlSyncScheduledAt((Instant) args[i++]) + .setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastDeletedDocumentCount((Long) args[i++]) + .setLastIncrementalSyncScheduledAt((Instant) args[i++]) + .setLastIndexedDocumentCount((Long) args[i++]) + .setLastSyncError((String) args[i++]) + .setLastSyncScheduledAt((Instant) args[i++]) + .setLastSyncStatus((ConnectorSyncStatus) args[i++]) + .setLastSynced((Instant) 
args[i++]) + .build() + ) + .setName((String) args[i++]) + .setPipeline((ConnectorIngestPipeline) args[i++]) + .setScheduling((ConnectorScheduling) args[i++]) + .setServiceType((String) args[i++]) + .setStatus((ConnectorStatus) args[i++]) + .setSyncCursor(args[i++]) + .setSyncNow((Boolean) args[i]) + .build(); + } + ); static { - PARSER.declareString(constructorArg(), ID_FIELD); PARSER.declareString(optionalConstructorArg(), API_KEY_ID_FIELD); PARSER.declareField( optionalConstructorArg(), @@ -357,23 +359,25 @@ public Connector(StreamInput in) throws IOException { PARSER.declareBoolean(optionalConstructorArg(), SYNC_NOW_FIELD); } - public static Connector fromXContentBytes(BytesReference source, XContentType xContentType) { + public static Connector fromXContentBytes(BytesReference source, String docId, XContentType xContentType) { try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { - return Connector.fromXContent(parser); + return Connector.fromXContent(parser, docId); } catch (IOException e) { throw new ElasticsearchParseException("Failed to parse a connector document.", e); } } - public static Connector fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); + public static Connector fromXContent(XContentParser parser, String docId) throws IOException { + return PARSER.parse(parser, docId); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(ID_FIELD.getPreferredName(), connectorId); + if (connectorId != null) { + builder.field(ID_FIELD.getPreferredName(), connectorId); + } if (apiKeyId != null) { builder.field(API_KEY_ID_FIELD.getPreferredName(), apiKeyId); } @@ -431,7 +435,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(connectorId); + 
out.writeOptionalString(connectorId); out.writeOptionalString(apiKeyId); out.writeMap(configuration, StreamOutput::writeWriteable); out.writeMap(customScheduling, StreamOutput::writeWriteable); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 4f4e9d234c813..c3afa6a8b31f6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; @@ -68,13 +69,14 @@ public ConnectorIndexService(Client client) { /** * Creates or updates the {@link Connector} in the underlying index. * + * @param docId The ID of the connector. * @param connector The connector object. * @param listener The action listener to invoke on response/failure. 
*/ - public void putConnector(Connector connector, ActionListener listener) { + public void putConnector(String docId, Connector connector, ActionListener listener) { try { final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) - .id(connector.getConnectorId()) + .id(docId) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); clientWithOrigin.index(indexRequest, listener); @@ -83,6 +85,31 @@ public void putConnector(Connector connector, ActionListener l } } + /** + * Creates or updates the {@link Connector} in the underlying index. + * Assigns connector an auto-generated doc ID. + * + * @param connector The connector object. + * @param listener The action listener to invoke on response/failure. + */ + public void postConnector(Connector connector, ActionListener listener) { + try { + final IndexRequest indexRequest = new IndexRequest(CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .source(connector.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + + clientWithOrigin.index( + indexRequest, + ActionListener.wrap( + indexResponse -> listener.onResponse(new PostConnectorAction.Response(indexResponse.getId())), + listener::onFailure + ) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + /** * Gets the {@link Connector} from the underlying index. 
* @@ -99,7 +126,11 @@ public void getConnector(String connectorId, ActionListener listener) return; } try { - final Connector connector = Connector.fromXContentBytes(getResponse.getSourceAsBytesRef(), XContentType.JSON); + final Connector connector = Connector.fromXContentBytes( + getResponse.getSourceAsBytesRef(), + connectorId, + XContentType.JSON + ); l.onResponse(connector); } catch (Exception e) { listener.onFailure(e); @@ -139,7 +170,7 @@ public void deleteConnector(String connectorId, ActionListener l } /** - * List the {@link Connector} in ascending order of their ids. + * List the {@link Connector} in ascending order of their index names. * * @param from From index to start the search from. * @param size The maximum number of {@link Connector}s to return. @@ -151,7 +182,7 @@ public void listConnectors(int from, int size, ActionListener() { @Override @@ -435,7 +466,7 @@ private static Connector hitToConnector(SearchHit searchHit) { // todo: don't return sensitive data from configuration in list endpoint - return Connector.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + return Connector.fromXContentBytes(searchHit.getSourceRef(), searchHit.getId(), XContentType.JSON); } public record ConnectorResult(List connectors, long totalResults) {} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java index 61d5947489322..9d97b6787c243 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/GetConnectorAction.java @@ -88,7 +88,9 @@ public int hashCode() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - 
builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + { + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + } builder.endObject(); return builder; } @@ -131,8 +133,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return connector.toXContent(builder, params); } - public static GetConnectorAction.Response fromXContent(XContentParser parser) throws IOException { - return new GetConnectorAction.Response(Connector.fromXContent(parser)); + public static GetConnectorAction.Response fromXContent(XContentParser parser, String docId) throws IOException { + return new GetConnectorAction.Response(Connector.fromXContent(parser, docId)); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java new file mode 100644 index 0000000000000..6570b111d8a0e --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/PostConnectorAction.java @@ -0,0 +1,242 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.Connector; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class PostConnectorAction extends ActionType { + + public static final PostConnectorAction INSTANCE = new PostConnectorAction(); + public static final String NAME = "cluster:admin/xpack/connector/post"; + + public PostConnectorAction() { + super(NAME, PostConnectorAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + @Nullable + private final String description; + @Nullable + private final String indexName; + @Nullable + private final Boolean isNative; + @Nullable + private final String language; + @Nullable + private final String name; + @Nullable + private final String serviceType; + + public Request(String description, String indexName, Boolean isNative, String language, String name, String serviceType) { + 
this.description = description; + this.indexName = indexName; + this.isNative = isNative; + this.language = language; + this.name = name; + this.serviceType = serviceType; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.description = in.readOptionalString(); + this.indexName = in.readOptionalString(); + this.isNative = in.readOptionalBoolean(); + this.language = in.readOptionalString(); + this.name = in.readOptionalString(); + this.serviceType = in.readOptionalString(); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_put_request", + false, + (args) -> new Request( + (String) args[0], + (String) args[1], + (Boolean) args[2], + (String) args[3], + (String) args[4], + (String) args[5] + ) + ); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareString(optionalConstructorArg(), new ParseField("index_name")); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField("is_native")); + PARSER.declareString(optionalConstructorArg(), new ParseField("language")); + PARSER.declareString(optionalConstructorArg(), new ParseField("name")); + PARSER.declareString(optionalConstructorArg(), new ParseField("service_type")); + } + + public static Request fromXContentBytes(BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return Request.fromXContent(parser); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static Request fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (description != null) { + builder.field("description", 
description); + } + if (indexName != null) { + builder.field("index_name", indexName); + } + if (isNative != null) { + builder.field("is_native", isNative); + } + if (language != null) { + builder.field("language", language); + } + if (name != null) { + builder.field("name", name); + } + if (serviceType != null) { + builder.field("service_type", serviceType); + } + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(description); + out.writeOptionalString(indexName); + out.writeOptionalBoolean(isNative); + out.writeOptionalString(language); + out.writeOptionalString(name); + out.writeOptionalString(serviceType); + } + + public String getDescription() { + return description; + } + + public String getIndexName() { + return indexName; + } + + public Boolean getIsNative() { + return isNative; + } + + public String getLanguage() { + return language; + } + + public String getName() { + return name; + } + + public String getServiceType() { + return serviceType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(description, request.description) + && Objects.equals(indexName, request.indexName) + && Objects.equals(isNative, request.isNative) + && Objects.equals(language, request.language) + && Objects.equals(name, request.name) + && Objects.equals(serviceType, request.serviceType); + } + + @Override + public int hashCode() { + return Objects.hash(description, indexName, isNative, language, name, serviceType); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final String id; + + public Response(StreamInput in) throws IOException { + super(in); + this.id = 
in.readString(); + } + + public Response(String id) { + this.id = id; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + public String getId() { + return id; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(Connector.ID_FIELD.getPreferredName(), id); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(id, response.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java new file mode 100644 index 0000000000000..9bfa3fd629567 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestPostConnectorAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestPostConnectorAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_post_action"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/" + EnterpriseSearch.CONNECTOR_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + PostConnectorAction.Request request = PostConnectorAction.Request.fromXContentBytes( + restRequest.content(), + restRequest.getXContentType() + ); + return channel -> client.execute( + PostConnectorAction.INSTANCE, + request, + new RestToXContentListener<>(channel, r -> RestStatus.CREATED, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java new file mode 100644 index 0000000000000..7b66ca81a77f9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPostConnectorAction.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.ConnectorIndexService; + +import java.util.Objects; + +public class TransportPostConnectorAction extends HandledTransportAction { + + protected final ConnectorIndexService connectorIndexService; + + @Inject + public TransportPostConnectorAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + PostConnectorAction.NAME, + transportService, + actionFilters, + PostConnectorAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorIndexService = new ConnectorIndexService(client); + } + + @Override + protected void doExecute(Task task, PostConnectorAction.Request request, ActionListener listener) { + + Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); + + Connector connector = new Connector.Builder().setDescription(request.getDescription()) + .setIndexName(request.getIndexName()) + .setIsNative(isNative) + .setLanguage(request.getLanguage()) + .setName(request.getName()) + .setServiceType(request.getServiceType()) + .build(); + + connectorIndexService.postConnector(connector, listener); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java index 013a8f4a8334d..8f4ac53b03bbd 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportPutConnectorAction.java @@ -47,8 +47,7 @@ protected void doExecute(Task task, PutConnectorAction.Request request, ActionLi Boolean isNative = Objects.requireNonNullElse(request.getIsNative(), false); - Connector connector = new Connector.Builder().setConnectorId(request.getConnectorId()) - .setDescription(request.getDescription()) + Connector connector = new Connector.Builder().setDescription(request.getDescription()) .setIndexName(request.getIndexName()) .setIsNative(isNative) .setLanguage(request.getLanguage()) @@ -56,6 +55,10 @@ protected void doExecute(Task task, PutConnectorAction.Request request, ActionLi .setServiceType(request.getServiceType()) .build(); - connectorIndexService.putConnector(connector, listener.map(r -> new PutConnectorAction.Response(r.getResult()))); + connectorIndexService.putConnector( + request.getConnectorId(), + connector, + listener.map(r -> new PutConnectorAction.Response(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9bcd03eb21ca9..2c9ac7c06b91c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -330,9 +330,7 @@ public void onResponse(GetResponse response) { Map 
source = response.getSource(); @SuppressWarnings("unchecked") - final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId( - (String) source.get(Connector.ID_FIELD.getPreferredName()) - ) + final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId(connectorId) .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 1960d14faeda2..eedfea13c671b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorConfigurationAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorErrorAction; import org.elasticsearch.xpack.application.connector.action.UpdateConnectorFilteringAction; @@ -49,8 +50,20 @@ public void setup() { public void testPutConnector() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), 
equalTo(RestStatus.OK))); + + Connector indexedConnector = awaitGetConnector(connectorId); + assertThat(connectorId, equalTo(indexedConnector.getConnectorId())); + } + + public void testPostConnector() throws Exception { + Connector connector = ConnectorTestUtils.getRandomConnector(); + PostConnectorAction.Response resp = awaitPostConnector(connector); + + Connector indexedConnector = awaitGetConnector(resp.getId()); + assertThat(resp.getId(), equalTo(indexedConnector.getConnectorId())); } public void testDeleteConnector() throws Exception { @@ -58,9 +71,8 @@ public void testDeleteConnector() throws Exception { List connectorIds = new ArrayList<>(); for (int i = 0; i < numConnectors; i++) { Connector connector = ConnectorTestUtils.getRandomConnector(); - connectorIds.add(connector.getConnectorId()); - DocWriteResponse resp = awaitPutConnector(connector); - assertThat(resp.status(), equalTo(RestStatus.CREATED)); + PostConnectorAction.Response resp = awaitPostConnector(connector); + connectorIds.add(resp.getId()); } String connectorIdToDelete = connectorIds.get(0); @@ -73,8 +85,8 @@ public void testDeleteConnector() throws Exception { public void testUpdateConnectorConfiguration() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); Map connectorConfiguration = connector.getConfiguration() @@ -83,20 +95,21 @@ public void testUpdateConnectorConfiguration() throws Exception { .collect(Collectors.toMap(Map.Entry::getKey, entry -> ConnectorTestUtils.getRandomConnectorConfigurationField())); UpdateConnectorConfigurationAction.Request updateConfigurationRequest = new UpdateConnectorConfigurationAction.Request( - connector.getConnectorId(), + connectorId, connectorConfiguration ); DocWriteResponse 
updateResponse = awaitUpdateConnectorConfiguration(updateConfigurationRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(connectorConfiguration, equalTo(indexedConnector.getConfiguration())); assertThat(indexedConnector.getStatus(), equalTo(ConnectorStatus.CONFIGURED)); } public void testUpdateConnectorPipeline() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorIngestPipeline updatedPipeline = new ConnectorIngestPipeline.Builder().setName("test-pipeline") @@ -106,20 +119,21 @@ public void testUpdateConnectorPipeline() throws Exception { .build(); UpdateConnectorPipelineAction.Request updatePipelineRequest = new UpdateConnectorPipelineAction.Request( - connector.getConnectorId(), + connectorId, updatedPipeline ); DocWriteResponse updateResponse = awaitUpdateConnectorPipeline(updatePipelineRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updatedPipeline, equalTo(indexedConnector.getPipeline())); } public void testUpdateConnectorFiltering() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connector); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); List filteringList = IntStream.range(0, 10) @@ -127,33 +141,35 @@ public void 
testUpdateConnectorFiltering() throws Exception { .collect(Collectors.toList()); UpdateConnectorFilteringAction.Request updateFilteringRequest = new UpdateConnectorFilteringAction.Request( - connector.getConnectorId(), + connectorId, filteringList ); DocWriteResponse updateResponse = awaitUpdateConnectorFiltering(updateFilteringRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(filteringList, equalTo(indexedConnector.getFiltering())); } public void testUpdateConnectorLastSeen() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); - UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + UpdateConnectorLastSeenAction.Request checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); DocWriteResponse updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnectorTime1 = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnectorTime1 = awaitGetConnector(connectorId); assertNotNull(indexedConnectorTime1.getLastSeen()); - checkInRequest = new UpdateConnectorLastSeenAction.Request(connector.getConnectorId()); + checkInRequest = new UpdateConnectorLastSeenAction.Request(connectorId); updateResponse = awaitUpdateConnectorLastSeen(checkInRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnectorTime2 = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnectorTime2 = 
awaitGetConnector(connectorId); assertNotNull(indexedConnectorTime2.getLastSeen()); assertTrue(indexedConnectorTime2.getLastSeen().isAfter(indexedConnectorTime1.getLastSeen())); @@ -161,68 +177,70 @@ public void testUpdateConnectorLastSeen() throws Exception { public void testUpdateConnectorLastSyncStats() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); + String connectorId = randomUUID(); - DocWriteResponse resp = awaitPutConnector(connector); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorSyncInfo syncStats = ConnectorTestUtils.getRandomConnectorSyncInfo(); - UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request( - connector.getConnectorId(), - syncStats - ); + UpdateConnectorLastSyncStatsAction.Request lastSyncStats = new UpdateConnectorLastSyncStatsAction.Request(connectorId, syncStats); DocWriteResponse updateResponse = awaitUpdateConnectorLastSyncStats(lastSyncStats); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(syncStats, equalTo(indexedConnector.getSyncInfo())); } public void testUpdateConnectorScheduling() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); ConnectorScheduling updatedScheduling = ConnectorTestUtils.getRandomConnectorScheduling(); UpdateConnectorSchedulingAction.Request updateSchedulingRequest = new UpdateConnectorSchedulingAction.Request( - connector.getConnectorId(), + connectorId, 
updatedScheduling ); DocWriteResponse updateResponse = awaitUpdateConnectorScheduling(updateSchedulingRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updatedScheduling, equalTo(indexedConnector.getScheduling())); } public void testUpdateConnectorError() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorErrorAction.Request updateErrorRequest = new UpdateConnectorErrorAction.Request( - connector.getConnectorId(), + connectorId, randomAlphaOfLengthBetween(5, 15) ); DocWriteResponse updateResponse = awaitUpdateConnectorError(updateErrorRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updateErrorRequest.getError(), equalTo(indexedConnector.getError())); } public void testUpdateConnectorNameOrDescription() throws Exception { Connector connector = ConnectorTestUtils.getRandomConnector(); - DocWriteResponse resp = awaitPutConnector(connector); + String connectorId = randomUUID(); + DocWriteResponse resp = awaitPutConnector(connectorId, connector); assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK))); UpdateConnectorNameAction.Request updateNameDescriptionRequest = new UpdateConnectorNameAction.Request( - connector.getConnectorId(), + connectorId, randomAlphaOfLengthBetween(5, 15), randomAlphaOfLengthBetween(5, 15) ); @@ -230,7 +248,7 @@ public void testUpdateConnectorNameOrDescription() throws Exception { 
DocWriteResponse updateResponse = awaitUpdateConnectorName(updateNameDescriptionRequest); assertThat(updateResponse.status(), equalTo(RestStatus.OK)); - Connector indexedConnector = awaitGetConnector(connector.getConnectorId()); + Connector indexedConnector = awaitGetConnector(connectorId); assertThat(updateNameDescriptionRequest.getName(), equalTo(indexedConnector.getName())); assertThat(updateNameDescriptionRequest.getDescription(), equalTo(indexedConnector.getDescription())); } @@ -260,11 +278,11 @@ public void onFailure(Exception e) { return resp.get(); } - private DocWriteResponse awaitPutConnector(Connector connector) throws Exception { + private DocWriteResponse awaitPutConnector(String docId, Connector connector) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.putConnector(connector, new ActionListener<>() { + connectorIndexService.putConnector(docId, connector, new ActionListener<>() { @Override public void onResponse(DocWriteResponse indexResponse) { resp.set(indexResponse); @@ -285,6 +303,31 @@ public void onFailure(Exception e) { return resp.get(); } + private PostConnectorAction.Response awaitPostConnector(Connector connector) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference resp = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + connectorIndexService.postConnector(connector, new ActionListener<>() { + @Override + public void onResponse(PostConnectorAction.Response indexResponse) { + resp.set(indexResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + assertTrue("Timeout waiting for post connector request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received 
null response from post connector request", resp.get()); + return resp.get(); + } + private Connector awaitGetConnector(String connectorId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java index 3488c7d9c8ba7..200b14109059b 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTestUtils.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.application.connector; +import org.elasticsearch.xpack.application.connector.action.PostConnectorAction; import org.elasticsearch.xpack.application.connector.action.PutConnectorAction; import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDependency; import org.elasticsearch.xpack.application.connector.configuration.ConfigurationDisplayType; @@ -52,6 +53,17 @@ public static PutConnectorAction.Request getRandomPutConnectorActionRequest() { ); } + public static PostConnectorAction.Request getRandomPostConnectorActionRequest() { + return new PostConnectorAction.Request( + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomBoolean()), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)), + randomFrom(randomAlphaOfLengthBetween(5, 15)) + ); + } + public static ConnectorScheduling getRandomConnectorScheduling() { return new ConnectorScheduling.Builder().setAccessControl( new ConnectorScheduling.ScheduleConfig.Builder().setEnabled(randomBoolean()).setInterval(getRandomCronExpression()).build() @@ -226,8 +238,8 @@ public static Map 
getRandomConnectorConfiguratio } public static Connector getRandomConnector() { - return new Connector.Builder().setConnectorId(randomAlphaOfLength(10)) - .setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) + + return new Connector.Builder().setApiKeyId(randomFrom(new String[] { null, randomAlphaOfLength(10) })) .setConfiguration(getRandomConnectorConfiguration()) .setCustomScheduling(Map.of(randomAlphaOfLengthBetween(5, 10), getRandomConnectorCustomSchedule())) .setDescription(randomFrom(new String[] { null, randomAlphaOfLength(10) })) diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 481f50bb41711..c08cd37218aeb 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -47,10 +47,10 @@ public final void testRandomSerialization() throws IOException { } public void testToXContent() throws IOException { + String connectorId = "test-connector"; String content = XContentHelper.stripWhitespace(""" { "api_key_id":"test", - "connector_id":"test-connector", "custom_scheduling":{ "schedule-key":{ "configuration_overrides":{ @@ -206,12 +206,12 @@ public void testToXContent() throws IOException { "sync_now":false }"""); - Connector connector = Connector.fromXContentBytes(new BytesArray(content), XContentType.JSON); + Connector connector = Connector.fromXContentBytes(new BytesArray(content), connectorId, XContentType.JSON); boolean humanReadable = true; BytesReference originalBytes = toShuffledXContent(connector, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); Connector parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { - parsed = 
Connector.fromXContent(parser); + parsed = Connector.fromXContent(parser, connectorId); } assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java index bcb1bcc86402f..168e9ec8f433e 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java @@ -38,7 +38,7 @@ protected GetConnectorAction.Response mutateInstance(GetConnectorAction.Response @Override protected GetConnectorAction.Response doParseInstance(XContentParser parser) throws IOException { - return GetConnectorAction.Response.fromXContent(parser); + return GetConnectorAction.Response.fromXContent(parser, connector.getConnectorId()); } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..0587ef7da8654 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionRequestBWCSerializingTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorAction.Request::new; + } + + @Override + protected PostConnectorAction.Request createTestInstance() { + return ConnectorTestUtils.getRandomPostConnectorActionRequest(); + } + + @Override + protected PostConnectorAction.Request mutateInstance(PostConnectorAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorAction.Request doParseInstance(XContentParser parser) throws IOException { + return PostConnectorAction.Request.fromXContent(parser); + } + + @Override + protected PostConnectorAction.Request mutateInstanceForVersion(PostConnectorAction.Request instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..fbce905cb4771 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/PostConnectorActionResponseBWCSerializingTests.java @@ -0,0 +1,36 @@ +/* + * Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PostConnectorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + @Override + protected Writeable.Reader instanceReader() { + return PostConnectorAction.Response::new; + } + + @Override + protected PostConnectorAction.Response createTestInstance() { + return new PostConnectorAction.Response(randomUUID()); + } + + @Override + protected PostConnectorAction.Response mutateInstance(PostConnectorAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PostConnectorAction.Response mutateInstanceForVersion(PostConnectorAction.Response instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index ace1138b8e987..49a3f0c4ad043 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -50,7 +50,7 @@ public void testFromXContent_WithAllFields_AllSet() throws IOException { "canceled_at": "2023-12-01T14:19:39.394194Z", "completed_at": "2023-12-01T14:19:39.394194Z", "connector": { - 
"connector_id": "connector-id", + "id": "connector-id", "filtering": [ { "active": { @@ -162,7 +162,7 @@ public void testFromXContent_WithAllNonOptionalFieldsSet_DoesNotThrow() throws I String content = XContentHelper.stripWhitespace(""" { "connector": { - "connector_id": "connector-id", + "id": "connector-id", "filtering": [ { "active": { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index a432f28f71e54..ccefd8ab6bdb7 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -126,6 +126,7 @@ public class Constants { "cluster:admin/xpack/connector/delete", "cluster:admin/xpack/connector/get", "cluster:admin/xpack/connector/list", + "cluster:admin/xpack/connector/post", "cluster:admin/xpack/connector/put", "cluster:admin/xpack/connector/update_configuration", "cluster:admin/xpack/connector/update_error", From 0b9487b269a68c404c7030094b9ac83af065b283 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 14:15:03 +0000 Subject: [PATCH 222/263] Rethrow errors in S3HttpFixture (#102976) This fixture runs in a context that catches `Throwable` and quietly turns all errors into a `500 Internal Server Error`, which may not lead to a test failure. This commit rethrows any errors on another thread to ensure they lead to test failures. 
--- .../src/main/java/fixture/s3/S3HttpFixture.java | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java index 539905b4a815f..5bca7523db4a4 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpFixture.java @@ -11,6 +11,7 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.rest.RestStatus; import org.junit.rules.ExternalResource; @@ -48,12 +49,18 @@ protected HttpHandler createHandler() { return new S3HttpHandler(bucket, basePath) { @Override public void handle(final HttpExchange exchange) throws IOException { - final String authorization = exchange.getRequestHeaders().getFirst("Authorization"); - if (authorization == null || authorization.contains(accessKey) == false) { - sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad access key"); - return; + try { + final String authorization = exchange.getRequestHeaders().getFirst("Authorization"); + if (authorization == null || authorization.contains(accessKey) == false) { + sendError(exchange, RestStatus.FORBIDDEN, "AccessDenied", "Bad access key"); + return; + } + super.handle(exchange); + } catch (Error e) { + // HttpServer catches Throwable, so we must throw errors on another thread + ExceptionsHelper.maybeDieOnAnotherThread(e); + throw e; } - super.handle(exchange); } }; } From 9f9e60ad3f78935fc5686c5116830f6d873a2066 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Dec 2023 17:22:34 +0100 Subject: [PATCH 223/263] Add RefCountAwareThreadedActionListener and use it for transport messages (#102989) The current ThreadedActionListener is not compatible with ref-counted response types. 
I created a version of it that correclty handles ref-couting and is otherwise a drop-in replacement for the ThreadedActionListener. Made use of this new listener in all spots where a ref-counted value is used (obviously all noop for now, but soon at least the search response won't be). part of #102030 --- .../TransportSnapshotsStatusAction.java | 4 +- .../AbstractThreadedActionListener.java | 76 +++++++++++++++++++ .../RefCountAwareThreadedActionListener.java | 52 +++++++++++++ .../support/ThreadedActionListener.java | 57 +------------- .../elasticsearch/indices/IndicesService.java | 4 +- .../retention/ExpiredForecastsRemover.java | 4 +- .../TransportGetStackTracesAction.java | 4 +- 7 files changed, 138 insertions(+), 63 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java create mode 100644 server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java index f8b9a9571ddd2..e1f1636781a08 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportSnapshotsStatusAction.java @@ -12,7 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; @@ -144,7 +144,7 @@ 
protected void masterOperation( new TransportNodesSnapshotsStatus.Request(nodesIds.toArray(Strings.EMPTY_ARRAY)).snapshots(snapshots) .timeout(request.masterNodeTimeout()), // fork to snapshot meta since building the response is expensive for large snapshots - new ThreadedActionListener<>( + new RefCountAwareThreadedActionListener<>( threadPool.executor(ThreadPool.Names.SNAPSHOT_META), listener.delegateFailureAndWrap( (l, nodeSnapshotStatuses) -> buildResponse( diff --git a/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java new file mode 100644 index 0000000000000..54f01abc63833 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; + +import java.util.concurrent.Executor; + +/** + * Base class for action listeners that wrap another action listener and dispatch its completion to an executor. 
+ */ +public abstract class AbstractThreadedActionListener implements ActionListener { + + private static final Logger logger = LogManager.getLogger(AbstractThreadedActionListener.class); + + protected final Executor executor; + protected final ActionListener delegate; + protected final boolean forceExecution; + + protected AbstractThreadedActionListener(Executor executor, boolean forceExecution, ActionListener delegate) { + this.forceExecution = forceExecution; + this.executor = executor; + this.delegate = delegate; + } + + @Override + public final void onFailure(final Exception e) { + executor.execute(new AbstractRunnable() { + @Override + public boolean isForceExecution() { + return forceExecution; + } + + @Override + protected void doRun() { + delegate.onFailure(e); + } + + @Override + public void onRejection(Exception rejectionException) { + rejectionException.addSuppressed(e); + try { + delegate.onFailure(rejectionException); + } catch (Exception doubleFailure) { + rejectionException.addSuppressed(doubleFailure); + onFailure(rejectionException); + } + } + + @Override + public void onFailure(Exception e) { + logger.error(() -> "failed to execute failure callback on [" + AbstractThreadedActionListener.this + "]", e); + assert false : e; + } + + @Override + public String toString() { + return AbstractThreadedActionListener.this + "/onFailure"; + } + }); + } + + @Override + public final String toString() { + return getClass().getSimpleName() + "[" + executor + "/" + delegate + "]"; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java new file mode 100644 index 0000000000000..3b13f37d238a0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/RefCountAwareThreadedActionListener.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.core.RefCounted; + +import java.util.concurrent.Executor; + +/** + * Same as {@link ThreadedActionListener} but for {@link RefCounted} types. Makes sure to increment ref-count by one before forking + * to another thread and decrementing after the forked task completes. + */ +public final class RefCountAwareThreadedActionListener extends AbstractThreadedActionListener { + + public RefCountAwareThreadedActionListener(Executor executor, ActionListener delegate) { + super(executor, false, delegate); + } + + @Override + public void onResponse(final Response response) { + response.mustIncRef(); + executor.execute(new ActionRunnable<>(delegate) { + @Override + public boolean isForceExecution() { + return forceExecution; + } + + @Override + protected void doRun() { + listener.onResponse(response); + } + + @Override + public String toString() { + return RefCountAwareThreadedActionListener.this + "/onResponse"; + } + + @Override + public void onAfter() { + response.decRef(); + } + }); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java index 1a505bdf620ed..5c13d8f93746e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java @@ -8,33 +8,22 @@ package org.elasticsearch.action.support; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; 
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.common.util.concurrent.AbstractRunnable; import java.util.concurrent.Executor; /** * An action listener that wraps another action listener and dispatches its completion to an executor. */ -public final class ThreadedActionListener implements ActionListener { - - private static final Logger logger = LogManager.getLogger(ThreadedActionListener.class); - - private final Executor executor; - private final ActionListener delegate; - private final boolean forceExecution; +public final class ThreadedActionListener extends AbstractThreadedActionListener { public ThreadedActionListener(Executor executor, ActionListener delegate) { this(executor, false, delegate); } public ThreadedActionListener(Executor executor, boolean forceExecution, ActionListener delegate) { - this.forceExecution = forceExecution; - this.executor = executor; - this.delegate = delegate; + super(executor, forceExecution, delegate); } @Override @@ -56,46 +45,4 @@ public String toString() { } }); } - - @Override - public void onFailure(final Exception e) { - executor.execute(new AbstractRunnable() { - @Override - public boolean isForceExecution() { - return forceExecution; - } - - @Override - protected void doRun() { - delegate.onFailure(e); - } - - @Override - public void onRejection(Exception rejectionException) { - rejectionException.addSuppressed(e); - try { - delegate.onFailure(rejectionException); - } catch (Exception doubleFailure) { - rejectionException.addSuppressed(doubleFailure); - onFailure(rejectionException); - } - } - - @Override - public void onFailure(Exception e) { - logger.error(() -> "failed to execute failure callback on [" + ThreadedActionListener.this + "]", e); - assert false : e; - } - - @Override - public String toString() { - return ThreadedActionListener.this + "/onFailure"; - } - }); - } - - @Override - public String toString() { - return 
"ThreadedActionListener[" + executor + "/" + delegate + "]"; - } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 0faa66a9d21da..dbbf2bb98212a 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.DataStream; @@ -890,7 +890,7 @@ public void createShard( .source(mapping.source().string(), XContentType.JSON) .timeout(TimeValue.MAX_VALUE) .masterNodeTimeout(TimeValue.MAX_VALUE), - new ThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) + new RefCountAwareThreadedActionListener<>(threadPool.generic(), listener.map(ignored -> null)) ); }, this, clusterStateVersion); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 424668a20bf05..677e71b304cb9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; -import 
org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -110,7 +110,7 @@ public void remove(float requestsPerSec, ActionListener listener, Boole client.execute( TransportSearchAction.TYPE, searchRequest, - new ThreadedActionListener<>(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), forecastStatsHandler) + new RefCountAwareThreadedActionListener<>(threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), forecastStatsHandler) ); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 801ed012de0ee..a51d8b509003a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.ThreadedActionListener; +import org.elasticsearch.action.support.RefCountAwareThreadedActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; @@ -764,7 +764,7 @@ private void mget(Client client, List indices, List slice, Action client.prepareMultiGet() .addIds(index.getName(), slice) .setRealtime(realtime) - .execute(new ThreadedActionListener<>(responseExecutor, listener)); + .execute(new 
RefCountAwareThreadedActionListener<>(responseExecutor, listener)); } } From a71e4c51fdc9a93fceb3271a68c6be5075732d4a Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 5 Dec 2023 16:29:31 +0000 Subject: [PATCH 224/263] Measure embedding size and store in model config (#102939) When OpenAI or HF text embedding model is PUT, evaluate the model, measure the embedding size and add to the service_settings. For the OpenAI models the similarity measure is known for HF models it is unknown. 2 new fields are added to the `service_settings`: ``` "service_settings": { "similarity": "dot_product", "dimensions": 1536 }, ``` ### Example: Create an OpenAI text embedding model ``` PUT _inference/text_embedding/openai_embeddings { "service": "openai", "service_settings": { "api_key": "${API_KEY}" }, "task_settings": { "model": "text-embedding-ada-002" } } # Responds with { "model_id": "openai_embeddings", "task_type": "text_embedding", "service": "openai", "service_settings": { "similarity": "dot_product", "dimensions": 1536 }, "task_settings": { "model": "text-embedding-ada-002" } } ``` --- .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 11 ++ .../TransportPutInferenceModelAction.java | 16 ++- .../inference/common/SimilarityMeasure.java | 24 ++++ .../inference/services/ServiceFields.java | 23 ++++ .../inference/services/ServiceUtils.java | 60 +++++++++ .../huggingface/HuggingFaceService.java | 30 +++++ .../HuggingFaceServiceSettings.java | 115 +++++++++++++++--- .../HuggingFaceElserServiceSettings.java | 9 +- .../HuggingFaceEmbeddingsModel.java | 4 + .../services/openai/OpenAiService.java | 37 ++++++ .../openai/OpenAiServiceSettings.java | 115 ++++++++++++++++-- .../embeddings/OpenAiEmbeddingsModel.java | 14 +++ .../HuggingFaceServiceSettingsTests.java | 76 +++++++++--- .../openai/OpenAiServiceSettingsTests.java | 68 ++++++++--- .../OpenAiEmbeddingsModelTests.java | 3 +- 16 files changed, 541 insertions(+), 65 deletions(-) create mode 
100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 369e1da237aa0..0b899a863e197 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -189,6 +189,7 @@ static TransportVersion def(int id) { public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_DSL_STATUS = def(8_556_00_0); public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); + public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 2f83310ea2388..b6ae21977e4bc 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -87,6 +87,17 @@ default void init(Client client) {} */ void start(Model model, ActionListener listener); + /** + * Optionally test the new model configuration in the inference service. + * This function should be called when the model is first created, the + * default action is to do nothing. 
+ * @param model The new model + * @param listener The listener + */ + default void checkModelConfig(Model model, ActionListener listener) { + listener.onResponse(model); + }; + /** * Return true if this model is hosted in the local Elasticsearch cluster * @return True if in cluster diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index f6bb90d701a4a..de561846a7a68 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -162,8 +162,20 @@ private void parseAndStoreModel( ActionListener listener ) { var model = service.parseRequestConfig(modelId, taskType, config, platformArchitectures); - // model is valid good to persist then start - this.modelRegistry.storeModel(model, ActionListener.wrap(r -> { startModel(service, model, listener); }, listener::onFailure)); + + service.checkModelConfig( + model, + ActionListener.wrap( + // model is valid good to persist then start + verifiedModel -> { + modelRegistry.storeModel( + verifiedModel, + ActionListener.wrap(r -> { startModel(service, verifiedModel, listener); }, listener::onFailure) + ); + }, + listener::onFailure + ) + ); } private static void startModel(InferenceService service, Model model, ActionListener listener) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java new file mode 100644 index 0000000000000..3028ecd078597 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/SimilarityMeasure.java @@ -0,0 +1,24 @@ 
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import java.util.Locale; + +public enum SimilarityMeasure { + COSINE, + DOT_PRODUCT; + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + + public static SimilarityMeasure fromString(String name) { + return valueOf(name.trim().toUpperCase(Locale.ROOT)); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java new file mode 100644 index 0000000000000..80e6e4a6124ec --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceFields.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services; + +/** + * Common strings and definitions shared by service implementations + */ +public final class ServiceFields { + + public static final String SIMILARITY = "similarity"; + public static final String DIMENSIONS = "dimensions"; + public static final String MAX_INPUT_TOKENS = "max_input_tokens"; + public static final String URL = "url"; + + private ServiceFields() { + + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 597cd172ff661..6689229b35da2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -8,18 +8,25 @@ package org.elasticsearch.xpack.inference.services; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Strings; +import org.elasticsearch.inference.InferenceService; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.net.URI; import java.net.URISyntaxException; +import java.util.List; import java.util.Map; import java.util.Objects; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; public class ServiceUtils { /** @@ -133,6 +140,20 @@ public static SecureString extractRequiredSecureString( return new 
SecureString(Objects.requireNonNull(requiredField).toCharArray()); } + public static SimilarityMeasure extractSimilarity(Map map, String scope, ValidationException validationException) { + String similarity = extractOptionalString(map, SIMILARITY, scope, validationException); + + if (similarity != null) { + try { + return SimilarityMeasure.fromString(similarity); + } catch (IllegalArgumentException iae) { + validationException.addValidationError("[" + scope + "] Unknown similarity measure [" + similarity + "]"); + } + } + + return null; + } + public static String extractRequiredString( Map map, String settingName, @@ -187,4 +208,43 @@ public static ElasticsearchStatusException createInvalidModelException(Model mod RestStatus.INTERNAL_SERVER_ERROR ); } + + /** + * Evaluate the model and return the text embedding size + * @param model Should be a text embedding model + * @param service The inference service + * @param listener Size listener + */ + public static void getEmbeddingSize(Model model, InferenceService service, ActionListener listener) { + assert model.getTaskType() == TaskType.TEXT_EMBEDDING; + + service.infer(model, List.of(TEST_EMBEDDING_INPUT), Map.of(), ActionListener.wrap(r -> { + if (r instanceof TextEmbeddingResults embeddingResults) { + if (embeddingResults.embeddings().isEmpty()) { + listener.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size, no embeddings were returned in test call", + RestStatus.BAD_REQUEST + ) + ); + } else { + listener.onResponse(embeddingResults.embeddings().get(0).values().size()); + } + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Could not determine embedding size. 
" + + "Expected a result of type [" + + TextEmbeddingResults.NAME + + "] got [" + + r.getWriteableName() + + "]", + RestStatus.BAD_REQUEST + ) + ); + } + }, listener::onFailure)); + } + + private static final String TEST_EMBEDDING_INPUT = "how big"; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index 99e39f6f55912..dc189352c8fc4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -11,11 +11,14 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModel; import org.elasticsearch.xpack.inference.services.huggingface.embeddings.HuggingFaceEmbeddingsModel; @@ -43,6 +46,33 @@ protected HuggingFaceModel createModel( }; } + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof HuggingFaceEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + ActionListener.wrap( + size -> listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)), + listener::onFailure + ) + ); + } else 
{ + listener.onResponse(model); + } + } + + private static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(HuggingFaceEmbeddingsModel model, int embeddingSize) { + var serviceSettings = new HuggingFaceServiceSettings( + model.getServiceSettings().uri(), + null, // Similarity measure is unknown + embeddingSize, + null // max input tokens is unknown + ); + + return new HuggingFaceEmbeddingsModel(model, serviceSettings); + } + @Override public String name() { return NAME; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java index dc98990b1ef8c..6464ca0e0fda8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettings.java @@ -12,47 +12,74 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.io.IOException; import java.net.URI; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; -public record HuggingFaceServiceSettings(URI uri) implements ServiceSettings { +public class HuggingFaceServiceSettings implements ServiceSettings { public static final String NAME = "hugging_face_service_settings"; - static final String URL = "url"; - public static HuggingFaceServiceSettings fromMap(Map map) { - return new HuggingFaceServiceSettings(extractUri(map, URL)); - } - - public static URI extractUri(Map map, String fieldName) { ValidationException validationException = new ValidationException(); + var uri = extractUri(map, URL, validationException); + + SimilarityMeasure similarityMeasure = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer dims = removeAsType(map, DIMENSIONS, Integer.class); + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - String parsedUrl = extractRequiredString(map, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); if (validationException.validationErrors().isEmpty() == false) { throw validationException; } + return new HuggingFaceServiceSettings(uri, similarityMeasure, dims, maxInputTokens); + } - URI uri = convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; + public static URI extractUri(Map map, String fieldName, ValidationException validationException) { + String parsedUrl = extractRequiredString(map, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + if (parsedUrl 
== null) { + return null; } + return convertToUri(parsedUrl, fieldName, ModelConfigurations.SERVICE_SETTINGS, validationException); + } - return uri; + private final URI uri; + private final SimilarityMeasure similarity; + private final Integer dimensions; + private final Integer maxInputTokens; + + public HuggingFaceServiceSettings(URI uri) { + this.uri = Objects.requireNonNull(uri); + this.similarity = null; + this.dimensions = null; + this.maxInputTokens = null; } - public HuggingFaceServiceSettings { - Objects.requireNonNull(uri); + public HuggingFaceServiceSettings( + URI uri, + @Nullable SimilarityMeasure similarityMeasure, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this.uri = Objects.requireNonNull(uri); + this.similarity = similarityMeasure; + this.dimensions = dimensions; + this.maxInputTokens = maxInputTokens; } public HuggingFaceServiceSettings(String url) { @@ -60,15 +87,32 @@ public HuggingFaceServiceSettings(String url) { } public HuggingFaceServiceSettings(StreamInput in) throws IOException { - this(in.readString()); + this.uri = createUri(in.readString()); + if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + similarity = in.readOptionalEnum(SimilarityMeasure.class); + dimensions = in.readOptionalVInt(); + maxInputTokens = in.readOptionalVInt(); + } else { + similarity = null; + dimensions = null; + maxInputTokens = null; + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(URL, uri.toString()); + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } builder.endObject(); - return builder; } @@ -85,5 +129,42 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void 
writeTo(StreamOutput out) throws IOException { out.writeString(uri.toString()); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + out.writeOptionalEnum(similarity); + out.writeOptionalVInt(dimensions); + out.writeOptionalVInt(maxInputTokens); + } + } + + public URI uri() { + return uri; + } + + public SimilarityMeasure similarity() { + return similarity; + } + + public Integer dimensions() { + return dimensions; + } + + public Integer maxInputTokens() { + return maxInputTokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HuggingFaceServiceSettings that = (HuggingFaceServiceSettings) o; + return Objects.equals(uri, that.uri) + && similarity == that.similarity + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(uri, similarity, dimensions, maxInputTokens); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index dd185c4ca8385..777c55b56ff3a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.inference.ServiceSettings; @@ -19,6 +20,7 @@ 
import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; @@ -28,7 +30,12 @@ public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSetting static final String URL = "url"; public static HuggingFaceElserServiceSettings fromMap(Map map) { - return new HuggingFaceElserServiceSettings(extractUri(map, URL)); + ValidationException validationException = new ValidationException(); + var uri = extractUri(map, URL, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + return new HuggingFaceElserServiceSettings(uri); } public HuggingFaceElserServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index 1f2e545a06901..ad9f09529de40 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -43,6 +43,10 @@ public HuggingFaceEmbeddingsModel( super(new ModelConfigurations(modelId, taskType, service, serviceSettings), new ModelSecrets(secrets)); } + public HuggingFaceEmbeddingsModel(HuggingFaceEmbeddingsModel model, HuggingFaceServiceSettings serviceSettings) { + this(model.getModelId(), model.getTaskType(), model.getConfigurations().getService(), serviceSettings, model.getSecretSettings()); + } + @Override public HuggingFaceServiceSettings getServiceSettings() { 
return (HuggingFaceServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index acf7b84bfccb1..8a2f6295b41c8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -19,10 +19,12 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import org.elasticsearch.xpack.inference.external.action.openai.OpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; import java.util.List; @@ -134,6 +136,41 @@ public void doInfer( action.execute(input, listener); } + /** + * For text embedding models get the embedding size and + * update the service settings. 
+ * + * @param model The new model + * @param listener The listener + */ + @Override + public void checkModelConfig(Model model, ActionListener listener) { + if (model instanceof OpenAiEmbeddingsModel embeddingsModel) { + ServiceUtils.getEmbeddingSize( + model, + this, + ActionListener.wrap( + size -> listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel, size)), + listener::onFailure + ) + ); + } else { + listener.onResponse(model); + } + } + + private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsModel model, int embeddingSize) { + OpenAiServiceSettings serviceSettings = new OpenAiServiceSettings( + model.getServiceSettings().uri(), + model.getServiceSettings().organizationId(), + SimilarityMeasure.DOT_PRODUCT, + embeddingSize, + null + ); + + return new OpenAiEmbeddingsModel(model, serviceSettings); + } + @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_OPENAI_ADDED; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java index 6c7ff17e352d5..5ade2aad0acb4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettings.java @@ -16,24 +16,30 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import java.io.IOException; import java.net.URI; import java.util.Map; +import java.util.Objects; +import static org.elasticsearch.xpack.inference.services.ServiceFields.DIMENSIONS; +import static 
org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; +import static org.elasticsearch.xpack.inference.services.ServiceFields.SIMILARITY; +import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractSimilarity; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; /** * Defines the base settings for interacting with OpenAI. - * @param uri an optional uri to override the openai url. This should only be used for testing. */ -public record OpenAiServiceSettings(@Nullable URI uri, @Nullable String organizationId) implements ServiceSettings { +public class OpenAiServiceSettings implements ServiceSettings { public static final String NAME = "openai_service_settings"; - public static final String URL = "url"; public static final String ORGANIZATION = "organization_id"; public static OpenAiServiceSettings fromMap(Map map) { @@ -41,15 +47,18 @@ public static OpenAiServiceSettings fromMap(Map map) { String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); String organizationId = extractOptionalString(map, ORGANIZATION, ModelConfigurations.SERVICE_SETTINGS, validationException); + SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); + Integer dims = removeAsType(map, DIMENSIONS, Integer.class); + Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); - // Throw if any of the settings were empty strings + // Throw if any of the settings were empty strings or invalid if (validationException.validationErrors().isEmpty() == false) { throw 
validationException; } // the url is optional and only for testing if (url == null) { - return new OpenAiServiceSettings((URI) null, organizationId); + return new OpenAiServiceSettings((URI) null, organizationId, similarity, dims, maxInputTokens); } URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -58,11 +67,37 @@ public static OpenAiServiceSettings fromMap(Map map) { throw validationException; } - return new OpenAiServiceSettings(uri, organizationId); + return new OpenAiServiceSettings(uri, organizationId, similarity, dims, maxInputTokens); } - public OpenAiServiceSettings(@Nullable String url, @Nullable String organizationId) { - this(createOptionalUri(url), organizationId); + private final URI uri; + private final String organizationId; + private final SimilarityMeasure similarity; + private final Integer dimensions; + private final Integer maxInputTokens; + + public OpenAiServiceSettings( + @Nullable URI uri, + @Nullable String organizationId, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this.uri = uri; + this.organizationId = organizationId; + this.similarity = similarity; + this.dimensions = dimensions; + this.maxInputTokens = maxInputTokens; + } + + public OpenAiServiceSettings( + @Nullable String uri, + @Nullable String organizationId, + @Nullable SimilarityMeasure similarity, + @Nullable Integer dimensions, + @Nullable Integer maxInputTokens + ) { + this(createOptionalUri(uri), organizationId, similarity, dimensions, maxInputTokens); } private static URI createOptionalUri(String url) { @@ -74,7 +109,37 @@ private static URI createOptionalUri(String url) { } public OpenAiServiceSettings(StreamInput in) throws IOException { - this(in.readOptionalString(), in.readOptionalString()); + uri = createOptionalUri(in.readOptionalString()); + organizationId = in.readOptionalString(); + if 
(in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + similarity = in.readOptionalEnum(SimilarityMeasure.class); + dimensions = in.readOptionalVInt(); + maxInputTokens = in.readOptionalVInt(); + } else { + similarity = null; + dimensions = null; + maxInputTokens = null; + } + } + + public URI uri() { + return uri; + } + + public String organizationId() { + return organizationId; + } + + public SimilarityMeasure similarity() { + return similarity; + } + + public Integer dimensions() { + return dimensions; + } + + public Integer maxInputTokens() { + return maxInputTokens; } @Override @@ -89,10 +154,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (uri != null) { builder.field(URL, uri.toString()); } - if (organizationId != null) { builder.field(ORGANIZATION, organizationId); } + if (similarity != null) { + builder.field(SIMILARITY, similarity); + } + if (dimensions != null) { + builder.field(DIMENSIONS, dimensions); + } + if (maxInputTokens != null) { + builder.field(MAX_INPUT_TOKENS, maxInputTokens); + } builder.endObject(); return builder; @@ -108,5 +181,27 @@ public void writeTo(StreamOutput out) throws IOException { var uriToWrite = uri != null ? 
uri.toString() : null; out.writeOptionalString(uriToWrite); out.writeOptionalString(organizationId); + if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED)) { + out.writeOptionalEnum(similarity); + out.writeOptionalVInt(dimensions); + out.writeOptionalVInt(maxInputTokens); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + OpenAiServiceSettings that = (OpenAiServiceSettings) o; + return Objects.equals(uri, that.uri) + && Objects.equals(organizationId, that.organizationId) + && Objects.equals(similarity, that.similarity) + && Objects.equals(dimensions, that.dimensions) + && Objects.equals(maxInputTokens, that.maxInputTokens); + } + + @Override + public int hashCode() { + return Objects.hash(uri, organizationId, similarity, dimensions, maxInputTokens); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 02c1e41e0374a..250837d895590 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -20,6 +20,7 @@ import java.util.Map; public class OpenAiEmbeddingsModel extends OpenAiModel { + public OpenAiEmbeddingsModel( String modelId, TaskType taskType, @@ -63,6 +64,19 @@ private OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiEmbeddi ); } + public OpenAiEmbeddingsModel(OpenAiEmbeddingsModel originalModel, OpenAiServiceSettings serviceSettings) { + super( + new ModelConfigurations( + originalModel.getConfigurations().getModelId(), + originalModel.getConfigurations().getTaskType(), + 
originalModel.getConfigurations().getService(), + serviceSettings, + originalModel.getTaskSettings() + ), + new ModelSecrets(originalModel.getSecretSettings()) + ); + } + @Override public OpenAiServiceSettings getServiceSettings() { return (OpenAiServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index 001d869f67a5c..7e2a333685321 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -11,6 +11,9 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.HashMap; @@ -22,14 +25,47 @@ public class HuggingFaceServiceSettingsTests extends AbstractWireSerializingTestCase { public static HuggingFaceServiceSettings createRandom() { - return new HuggingFaceServiceSettings(randomAlphaOfLength(15)); + return createRandom(randomAlphaOfLength(15)); + } + + private static HuggingFaceServiceSettings createRandom(String url) { + SimilarityMeasure similarityMeasure = null; + Integer dims = null; + var isTextEmbeddingModel = randomBoolean(); + if (isTextEmbeddingModel) { + similarityMeasure = randomFrom(SimilarityMeasure.values()); + dims = randomIntBetween(32, 256); + } + Integer maxInputTokens = randomBoolean() ? 
null : randomIntBetween(128, 256); + return new HuggingFaceServiceSettings(ServiceUtils.createUri(url), similarityMeasure, dims, maxInputTokens); } public void testFromMap() { var url = "https://www.abc.com"; - var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))); - - assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + var similarity = SimilarityMeasure.DOT_PRODUCT; + var dims = 384; + var maxInputTokens = 128; + { + var serviceSettings = HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))); + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(url))); + } + { + var serviceSettings = HuggingFaceServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity.toString(), + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) + ); + assertThat(serviceSettings, is(new HuggingFaceServiceSettings(ServiceUtils.createUri(url), similarity, dims, maxInputTokens))); + } } public void testFromMap_MissingUrl_ThrowsError() { @@ -38,10 +74,7 @@ public void testFromMap_MissingUrl_ThrowsError() { assertThat( thrownException.getMessage(), containsString( - Strings.format( - "Validation Failed: 1: [service_settings] does not contain the required setting [%s];", - HuggingFaceServiceSettings.URL - ) + Strings.format("Validation Failed: 1: [service_settings] does not contain the required setting [%s];", ServiceFields.URL) ) ); } @@ -49,7 +82,7 @@ public void testFromMap_MissingUrl_ThrowsError() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, ""))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) ); assertThat( @@ -57,7 +90,7 @@ public void 
testFromMap_EmptyUrl_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. [%s] must be a non-empty string;", - HuggingFaceServiceSettings.URL + ServiceFields.URL ) ) ); @@ -67,19 +100,24 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(HuggingFaceServiceSettings.URL, url))) + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) ); assertThat( thrownException.getMessage(), - is( - Strings.format( - "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", - url, - HuggingFaceServiceSettings.URL - ) - ) + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) + ); + } + + public void testFromMap_InvalidSimilarity_ThrowsError() { + var url = "https://www.abc.com"; + var similarity = "by_size"; + var thrownException = expectThrows( + ValidationException.class, + () -> HuggingFaceServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url, ServiceFields.SIMILARITY, similarity))) ); + + assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); } @Override @@ -100,7 +138,7 @@ protected HuggingFaceServiceSettings mutateInstance(HuggingFaceServiceSettings i public static Map getServiceSettingsMap(String url) { var map = new HashMap(); - map.put(HuggingFaceServiceSettings.URL, url); + map.put(ServiceFields.URL, url); return map; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java index 9e20286c1d0ff..81bbb4b041c51 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceSettingsTests.java @@ -12,6 +12,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; +import org.elasticsearch.xpack.inference.services.ServiceFields; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.HashMap; @@ -23,7 +26,7 @@ public class OpenAiServiceSettingsTests extends AbstractWireSerializingTestCase { public static OpenAiServiceSettings createRandomWithNonNullUrl() { - return new OpenAiServiceSettings(randomAlphaOfLength(15), randomAlphaOfLength(15)); + return createRandom(randomAlphaOfLength(15)); } /** @@ -31,18 +34,49 @@ public static OpenAiServiceSettings createRandomWithNonNullUrl() { */ public static OpenAiServiceSettings createRandom() { var url = randomBoolean() ? randomAlphaOfLength(15) : null; + return createRandom(url); + } + + private static OpenAiServiceSettings createRandom(String url) { var organizationId = randomBoolean() ? randomAlphaOfLength(15) : null; - return new OpenAiServiceSettings(url, organizationId); + SimilarityMeasure similarityMeasure = null; + Integer dims = null; + var isTextEmbeddingModel = randomBoolean(); + if (isTextEmbeddingModel) { + similarityMeasure = SimilarityMeasure.DOT_PRODUCT; + dims = 1536; + } + Integer maxInputTokens = randomBoolean() ? 
null : randomIntBetween(128, 256); + return new OpenAiServiceSettings(ServiceUtils.createUri(url), organizationId, similarityMeasure, dims, maxInputTokens); } public void testFromMap() { var url = "https://www.abc.com"; var org = "organization"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; var serviceSettings = OpenAiServiceSettings.fromMap( - new HashMap<>(Map.of(OpenAiServiceSettings.URL, url, OpenAiServiceSettings.ORGANIZATION, org)) + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + OpenAiServiceSettings.ORGANIZATION, + org, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens + ) + ) ); - assertThat(serviceSettings, is(new OpenAiServiceSettings(url, org))); + assertThat( + serviceSettings, + is(new OpenAiServiceSettings(ServiceUtils.createUri(url), org, SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens)) + ); } public void testFromMap_MissingUrl_DoesNotThrowException() { @@ -54,7 +88,7 @@ public void testFromMap_MissingUrl_DoesNotThrowException() { public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceSettings.URL, ""))) + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) ); assertThat( @@ -62,7 +96,7 @@ public void testFromMap_EmptyUrl_ThrowsError() { containsString( Strings.format( "Validation Failed: 1: [service_settings] Invalid value empty string. 
[%s] must be a non-empty string;", - OpenAiServiceSettings.URL + ServiceFields.URL ) ) ); @@ -95,21 +129,25 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(OpenAiServiceSettings.URL, url))) + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) ); assertThat( thrownException.getMessage(), - is( - Strings.format( - "Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", - url, - OpenAiServiceSettings.URL - ) - ) + is(Strings.format("Validation Failed: 1: [service_settings] Invalid url [%s] received for field [%s];", url, ServiceFields.URL)) ); } + public void testFromMap_InvalidSimilarity_ThrowsError() { + var similarity = "by_size"; + var thrownException = expectThrows( + ValidationException.class, + () -> OpenAiServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.SIMILARITY, similarity))) + ); + + assertThat(thrownException.getMessage(), is("Validation Failed: 1: [service_settings] Unknown similarity measure [by_size];")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiServiceSettings::new; @@ -129,7 +167,7 @@ public static Map getServiceSettingsMap(@Nullable String url, @N var map = new HashMap(); if (url != null) { - map.put(OpenAiServiceSettings.URL, url); + map.put(ServiceFields.URL, url); } if (org != null) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 62cb609a59d2a..302aacdc30606 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; @@ -58,7 +59,7 @@ public static OpenAiEmbeddingsModel createModel( "id", TaskType.TEXT_EMBEDDING, "service", - new OpenAiServiceSettings(url, org), + new OpenAiServiceSettings(url, org, SimilarityMeasure.DOT_PRODUCT, 1536, null), new OpenAiEmbeddingsTaskSettings(modelName, user), new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); From b7344575df3d4cb13df720308821fc6d1e057891 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 5 Dec 2023 08:35:32 -0800 Subject: [PATCH 225/263] Introduce local block factory (#102901) Requesting and returning memory from a CircuitBreaker can be costly due to the involvement of read/write on one or several atomic longs. To address this issue, the local breaker adopts a strategy of over-requesting memory, utilizing the reserved amount for subsequent memory requests without direct access to the actual breaker. Before passing a Block to another Driver, it is necessary to switch the owning block factory to its parent, which is associated with the global breaker. This is done to bypass the local breaker when releasing memory, as the releasing thread can be any thread, not necessarily the one executing the Driver. There are two specific operators that need to change the owning block factory: SinkOperator (superset of ExchangeSinkOperator), which is the last operator of a Driver, and AsyncOperator, which can be responded by any thread in response. 
The optimization reduces the latency of the enrich operation in the nyc_taxis benchmark from 100ms to 50ms. When combined with #102902, it further reduces the latency to below 40ms, better than the previous performance before the regression. Relates #102625 --- docs/changelog/102901.yaml | 5 + .../compute/data/BooleanArrayBlock.java | 6 +- .../compute/data/BooleanArrayVector.java | 2 +- .../compute/data/BooleanBigArrayVector.java | 1 + .../compute/data/BooleanVectorBlock.java | 5 + .../compute/data/BytesRefArrayBlock.java | 6 +- .../compute/data/BytesRefArrayVector.java | 4 +- .../compute/data/BytesRefVectorBlock.java | 5 + .../compute/data/ConstantBooleanVector.java | 2 +- .../compute/data/ConstantBytesRefVector.java | 2 +- .../compute/data/ConstantDoubleVector.java | 2 +- .../compute/data/ConstantIntVector.java | 2 +- .../compute/data/ConstantLongVector.java | 2 +- .../compute/data/DoubleArrayBlock.java | 6 +- .../compute/data/DoubleArrayVector.java | 2 +- .../compute/data/DoubleBigArrayVector.java | 1 + .../compute/data/DoubleVectorBlock.java | 5 + .../compute/data/IntArrayBlock.java | 6 +- .../compute/data/IntArrayVector.java | 2 +- .../compute/data/IntBigArrayVector.java | 1 + .../compute/data/IntVectorBlock.java | 5 + .../compute/data/LongArrayBlock.java | 6 +- .../compute/data/LongArrayVector.java | 2 +- .../compute/data/LongBigArrayVector.java | 1 + .../compute/data/LongVectorBlock.java | 5 + .../compute/data/AbstractBlock.java | 7 +- .../compute/data/AbstractVector.java | 7 +- .../org/elasticsearch/compute/data/Block.java | 11 ++ .../compute/data/BlockFactory.java | 23 +++ .../compute/data/ConstantNullBlock.java | 4 +- .../elasticsearch/compute/data/DocBlock.java | 5 + .../elasticsearch/compute/data/DocVector.java | 7 + .../compute/data/LocalCircuitBreaker.java | 137 ++++++++++++++++++ .../org/elasticsearch/compute/data/Page.java | 12 +- .../elasticsearch/compute/data/Vector.java | 9 ++ .../compute/data/X-ArrayBlock.java.st | 8 +- 
.../compute/data/X-ArrayVector.java.st | 4 +- .../compute/data/X-BigArrayVector.java.st | 1 + .../compute/data/X-ConstantVector.java.st | 2 +- .../compute/data/X-VectorBlock.java.st | 5 + .../compute/operator/AsyncOperator.java | 10 +- .../compute/operator/OutputOperator.java | 2 +- .../operator/PageConsumerOperator.java | 2 +- .../compute/operator/SinkOperator.java | 9 ++ .../exchange/ExchangeSinkOperator.java | 5 +- .../compute/data/BlockFactoryTests.java | 80 ++++++++++ .../data/LocalCircuitBreakerTests.java | 121 ++++++++++++++++ .../compute/data/MockBlockFactory.java | 14 +- .../compute/operator/AsyncOperatorTests.java | 38 ++++- .../exchange/ExchangeServiceTests.java | 2 +- .../action/AbstractEsqlIntegTestCase.java | 11 ++ .../xpack/esql/action/EnrichIT.java | 3 + .../esql/action/EsqlActionBreakerIT.java | 3 + .../esql/enrich/EnrichLookupService.java | 31 +++- .../esql/planner/LocalExecutionPlanner.java | 46 ++++-- .../xpack/esql/plugin/ComputeService.java | 5 + .../esql/plugin/TransportEsqlQueryAction.java | 1 + .../elasticsearch/xpack/esql/CsvTests.java | 12 ++ .../planner/LocalExecutionPlannerTests.java | 1 + 59 files changed, 651 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/102901.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java diff --git a/docs/changelog/102901.yaml b/docs/changelog/102901.yaml new file mode 100644 index 0000000000000..ac417691b525c --- /dev/null +++ b/docs/changelog/102901.yaml @@ -0,0 +1,5 @@ +pr: 102901 +summary: Introduce local block factory +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 
844a8bc1b7290..7c2723163197a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -50,7 +50,7 @@ public boolean getBoolean(int valueIndex) { @Override public BooleanBlock filter(int... positions) { - try (var builder = blockFactory.newBooleanBlockBuilder(positions.length)) { + try (var builder = blockFactory().newBooleanBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public BooleanBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 8ad4196c57997..5aa8724eb0ca2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public BooleanVector filter(int... 
positions) { - try (BooleanVector.Builder builder = blockFactory.newBooleanVectorBuilder(positions.length)) { + try (BooleanVector.Builder builder = blockFactory().newBooleanVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendBoolean(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5ad88ab1ac6e9..2621ec612944e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public BooleanVector filter(int... positions) { + var blockFactory = blockFactory(); final BitArray filtered = new BitArray(positions.length, blockFactory.bigArrays()); for (int i = 0; i < positions.length; i++) { if (values.get(positions[i])) { diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index c5c3a24736c16..19c551d85617f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 2ae412b3867a0..34d4e5aaa43e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -53,7 +53,7 @@ public BytesRef getBytesRef(int valueIndex, BytesRef dest) { @Override public BytesRefBlock filter(int... positions) { final BytesRef scratch = new BytesRef(); - try (var builder = blockFactory.newBytesRefBlockBuilder(positions.length)) { + try (var builder = blockFactory().newBytesRefBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -88,7 +88,7 @@ public BytesRefBlock expand() { } // TODO use reference counting to share the values final BytesRef scratch = new BytesRef(); - try (var builder = blockFactory.newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -141,7 +141,7 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 02ab9a09b15e1..a8bb60f9f20fa 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -57,7 +57,7 @@ public boolean isConstant() { @Override public BytesRefVector filter(int... positions) { final var scratch = new BytesRef(); - try (BytesRefVector.Builder builder = blockFactory.newBytesRefVectorBuilder(positions.length)) { + try (BytesRefVector.Builder builder = blockFactory().newBytesRefVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendBytesRef(values.get(pos, scratch)); } @@ -98,7 +98,7 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index d8c2c615a3dfb..e834a1c171e49 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -84,4 +84,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index e4f6e6f144abe..b636d89a206e0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index f5f6e7945d03b..be34db592b228 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -89,6 +89,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index 05e71f3853155..f6cce49aa3d42 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already 
released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index 3f1eb45843c66..fa7b9223d5107 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 08840c3772a9e..21d4d81dfd193 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -84,6 +84,6 @@ public void close() { throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index a8dbb5ba1d963..db3546c73c054 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -50,7 +50,7 @@ public double getDouble(int valueIndex) { @Override public DoubleBlock filter(int... positions) { - try (var builder = blockFactory.newDoubleBlockBuilder(positions.length)) { + try (var builder = blockFactory().newDoubleBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public DoubleBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 69cf686a1576a..08e51b0e313d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public DoubleVector filter(int... 
positions) { - try (DoubleVector.Builder builder = blockFactory.newDoubleVectorBuilder(positions.length)) { + try (DoubleVector.Builder builder = blockFactory().newDoubleVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendDouble(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index d50e8adbbd37d..476b94ad3fa05 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public DoubleVector filter(int... positions) { + var blockFactory = blockFactory(); final DoubleArray filtered = blockFactory.bigArrays().newDoubleArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index ac4c826b5f2d2..62319e9c100cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index bc41b83eca375..111fc0c757af1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -50,7 +50,7 @@ public int getInt(int valueIndex) { @Override public IntBlock filter(int... positions) { - try (var builder = blockFactory.newIntBlockBuilder(positions.length)) { + try (var builder = blockFactory().newIntBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public IntBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 90766a9a67d81..9c8c27efa0806 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public IntVector filter(int... 
positions) { - try (IntVector.Builder builder = blockFactory.newIntVectorBuilder(positions.length)) { + try (IntVector.Builder builder = blockFactory().newIntVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendInt(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index 3bb9461300ee4..76d2797f2a64b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public IntVector filter(int... positions) { + var blockFactory = blockFactory(); final IntArray filtered = blockFactory.bigArrays().newIntArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 60280ebb13064..ccc242dd1a573 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index be10a517b7df0..9e0fa9bcc2993 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -50,7 +50,7 @@ public long getLong(int valueIndex) { @Override public LongBlock filter(int... positions) { - try (var builder = blockFactory.newLongBlockBuilder(positions.length)) { + try (var builder = blockFactory().newLongBlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -84,7 +84,7 @@ public LongBlock expand() { return this; } // TODO use reference counting to share the values - try (var builder = blockFactory.newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -137,6 +137,6 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index b476556ce27fa..0a3ada321d94c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -55,7 +55,7 @@ public boolean isConstant() { @Override public LongVector filter(int... 
positions) { - try (LongVector.Builder builder = blockFactory.newLongVectorBuilder(positions.length)) { + try (LongVector.Builder builder = blockFactory().newLongVectorBuilder(positions.length)) { for (int pos : positions) { builder.appendLong(values[pos]); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index ccf4a6944b60e..2101b606e9a90 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -60,6 +60,7 @@ public long ramBytesUsed() { @Override public LongVector filter(int... positions) { + var blockFactory = blockFactory(); final LongArray filtered = blockFactory.bigArrays().newLongArray(positions.length, true); for (int i = 0; i < positions.length; i++) { filtered.set(i, values.get(positions[i])); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index c9b65ba3e9029..94697b3136fce 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -83,4 +83,9 @@ public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java index 39f17cfecab1a..177e3fb6798d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java @@ -21,7 +21,7 @@ abstract class AbstractBlock implements Block { @Nullable protected final BitSet nullsMask; - protected final BlockFactory blockFactory; + private BlockFactory blockFactory; /** * @param positionCount the number of values in this block @@ -95,6 +95,11 @@ public BlockFactory blockFactory() { return blockFactory; } + @Override + public void allowPassingToDifferentDriver() { + blockFactory = blockFactory.parent(); + } + @Override public boolean isReleased() { return hasReferences() == false; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index d7bda952bdcd0..33ef14cfb4ad8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -13,7 +13,7 @@ abstract class AbstractVector implements Vector { private final int positionCount; - protected final BlockFactory blockFactory; + private BlockFactory blockFactory; protected boolean released; protected AbstractVector(int positionCount, BlockFactory blockFactory) { @@ -35,6 +35,11 @@ public BlockFactory blockFactory() { return blockFactory; } + @Override + public void allowPassingToDifferentDriver() { + blockFactory = blockFactory.parent(); + } + @Override public void close() { if (released) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 
1d8c548d90571..964e510de9a20 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -62,8 +62,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R ElementType elementType(); /** The block factory associated with this block. */ + // TODO: renaming this to owning blockFactory once we pass blockFactory for filter and expand BlockFactory blockFactory(); + /** + * Before passing a Block to another Driver, it is necessary to switch the owning block factory to its parent, which is associated + * with the global circuit breaker. This ensures that when the new driver releases this Block, it returns memory directly to the + * parent block factory instead of the local block factory of this Block. This is important because the local block factory is + * not thread safe and doesn't support simultaneous access by more than one thread. + */ + void allowPassingToDifferentDriver(); + /** * Tells if this block has been released. A block is released by calling its {@link Block#close()} or {@link Block#decRef()} methods. * @return true iff the block's reference count is zero. @@ -102,6 +111,7 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * The new block may hold a reference to this block, increasing this block's reference count. * @param positions the positions to retain * @return a filtered block + * TODO: pass BlockFactory */ Block filter(int... positions); @@ -145,6 +155,7 @@ default boolean mvSortedAscending() { /** * Expand multivalued fields into one row per value. Returns the same block if there aren't any multivalued * fields to expand. The returned block needs to be closed by the caller to release the block's resources. 
+ * TODO: pass BlockFactory */ Block expand(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 88616e7fc95fd..092f66a7d4427 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block.MvOrdering; @@ -18,6 +19,11 @@ import java.util.BitSet; public class BlockFactory { + public static final String LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING = "esql.block_factory.local_breaker.over_reserved"; + public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_SIZE = ByteSizeValue.ofKb(4); + + public static final String LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING = "esql.block_factory.local_breaker.max_over_reserved"; + public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE = ByteSizeValue.ofKb(16); private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( new NoopCircuitBreaker("noop-esql-breaker"), @@ -27,10 +33,16 @@ public class BlockFactory { private final CircuitBreaker breaker; private final BigArrays bigArrays; + private final BlockFactory parent; public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { + this(breaker, bigArrays, null); + } + + protected BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { this.breaker = breaker; this.bigArrays = bigArrays; + this.parent = parent; } /** @@ -54,6 +66,17 @@ 
public BigArrays bigArrays() { return bigArrays; } + protected BlockFactory parent() { + return parent != null ? parent : this; + } + + public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { + if (childBreaker.parentBreaker() != breaker) { + throw new IllegalStateException("Different parent breaker"); + } + return new BlockFactory(childBreaker, bigArrays, this); + } + /** * Adjust the circuit breaker with the given delta, if the delta is negative, the breaker will * be adjusted without tripping. If the data was already created before calling this method, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 03cfa2d940efd..29e39f43cddc2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -69,7 +69,7 @@ public ElementType elementType() { @Override public ConstantNullBlock filter(int... 
positions) { - return (ConstantNullBlock) blockFactory.newConstantNullBlock(positions.length); + return (ConstantNullBlock) blockFactory().newConstantNullBlock(positions.length); } public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -128,7 +128,7 @@ public String toString() { @Override public void closeInternal() { - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } static class Builder implements Block.Builder { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index 9dc27196bd128..d45314f5c8a78 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -167,4 +167,9 @@ public void close() { Releasables.closeExpectNoException(shards, segments, docs); } } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 24c656404e89f..3097dc73fb814 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -223,6 +223,13 @@ public long ramBytesUsed() { return ramBytesEstimated(shards, segments, docs, shardSegmentDocMapForwards, shardSegmentDocMapBackwards); } + @Override + public void allowPassingToDifferentDriver() { + shards.allowPassingToDifferentDriver(); + segments.allowPassingToDifferentDriver(); + docs.allowPassingToDifferentDriver(); + } + @Override public void close() { released = true; diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java new file mode 100644 index 0000000000000..4d134963f12e7 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/LocalCircuitBreaker.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Releasable; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Requesting and returning memory from a {@link CircuitBreaker} can be costly due to the involvement of read/write + * on one or several atomic longs. To address this issue, the local breaker adopts a strategy of over-requesting memory, + * utilizing the reserved amount for subsequent memory requests without direct access to the actual breaker. 
+ * + * @see BlockFactory#newChildFactory(LocalCircuitBreaker) + * @see Block#allowPassingToDifferentDriver() + */ +public final class LocalCircuitBreaker implements CircuitBreaker, Releasable { + private final CircuitBreaker breaker; + private final long overReservedBytes; + private final long maxOverReservedBytes; + private long reservedBytes; + private final AtomicBoolean closed = new AtomicBoolean(false); + + public record SizeSettings(long overReservedBytes, long maxOverReservedBytes) { + public SizeSettings(Settings settings) { + this( + settings.getAsBytesSize( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_DEFAULT_SIZE + ).getBytes(), + settings.getAsBytesSize( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE + ).getBytes() + ); + } + } + + public LocalCircuitBreaker(CircuitBreaker breaker, long overReservedBytes, long maxOverReservedBytes) { + this.breaker = breaker; + this.maxOverReservedBytes = maxOverReservedBytes; + this.overReservedBytes = Math.min(overReservedBytes, maxOverReservedBytes); + } + + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + breaker.circuitBreak(fieldName, bytesNeeded); + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + if (bytes <= reservedBytes) { + reservedBytes -= bytes; + maybeReduceReservedBytes(); + } else { + breaker.addEstimateBytesAndMaybeBreak(bytes - reservedBytes + overReservedBytes, label); + reservedBytes = overReservedBytes; + } + } + + @Override + public void addWithoutBreaking(long bytes) { + if (bytes <= reservedBytes) { + reservedBytes -= bytes; + maybeReduceReservedBytes(); + } else { + // leave the reserve untouched as we are making a call anyway + breaker.addWithoutBreaking(bytes); + } + } + + private void maybeReduceReservedBytes() { + if (reservedBytes > maxOverReservedBytes) { + 
breaker.addWithoutBreaking(maxOverReservedBytes - reservedBytes); + reservedBytes = maxOverReservedBytes; + } + } + + public CircuitBreaker parentBreaker() { + return breaker; + } + + @Override + public long getUsed() { + return breaker.getUsed(); + } + + // for testings + long getReservedBytes() { + return reservedBytes; + } + + @Override + public long getLimit() { + return breaker.getLimit(); + } + + @Override + public double getOverhead() { + return breaker.getOverhead(); + } + + @Override + public long getTrippedCount() { + return breaker.getTrippedCount(); + } + + @Override + public String getName() { + return breaker.getName(); + } + + @Override + public Durability getDurability() { + return breaker.getDurability(); + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + breaker.setLimitAndOverhead(limit, overhead); + } + + @Override + public void close() { + if (closed.compareAndSet(false, true)) { + breaker.addWithoutBreaking(-reservedBytes); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java index de6b5385ab167..a9903046bb4ed 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Page.java @@ -236,7 +236,15 @@ public void releaseBlocks() { Releasables.closeExpectNoException(blocks); } - static int mapSize(int expectedSize) { - return expectedSize < 2 ? expectedSize + 1 : (int) (expectedSize / 0.75 + 1.0); + /** + * Before passing a Page to another Driver, it is necessary to switch the owning block factories of its Blocks to their parents, + * which are associated with the global circuit breaker. This ensures that when the new driver releases this Page, it returns + * memory directly to the parent block factory instead of the local block factory. 
This is important because the local block + * factory is not thread safe and doesn't support simultaneous access by more than one thread. + */ + public void allowPassingToDifferentDriver() { + for (Block block : blocks) { + block.allowPassingToDifferentDriver(); + } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index e2cea86a5a38f..0ca06498f7129 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -48,8 +48,17 @@ public interface Vector extends Accountable, Releasable { boolean isConstant(); /** The block factory associated with this vector. */ + // TODO: Renaming this to owningBlockFactory BlockFactory blockFactory(); + /** + * Before passing a Vector to another Driver, it is necessary to switch the owning block factory to its parent, which is associated + * with the global circuit breaker. This ensures that when the new driver releases this Vector, it returns memory directly to the + * parent block factory instead of the local block factory of this Block. This is important because the local block factory is + * not thread safe and doesn't support simultaneous access by more than one thread. + */ + void allowPassingToDifferentDriver(); + /** * Builds {@link Vector}s. Typically, you use one of it's direct supinterfaces like {@link IntVector.Builder}. * This is {@link Releasable} and should be released after building the vector or if building the vector fails. 
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 72123764e9b55..03397e1a2e5ad 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -71,7 +71,7 @@ $endif$ $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ - try (var builder = blockFactory.new$Type$BlockBuilder(positions.length)) { + try (var builder = blockFactory().new$Type$BlockBuilder(positions.length)) { for (int pos : positions) { if (isNull(pos)) { builder.appendNull(); @@ -108,7 +108,7 @@ $endif$ $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ - try (var builder = blockFactory.new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { + try (var builder = blockFactory().new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { builder.appendNull(); @@ -171,10 +171,10 @@ $endif$ @Override public void closeInternal() { $if(BytesRef)$ - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); $else$ - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); $endif$ } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 02a876142fb0d..4dd903945d04f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -79,7 +79,7 @@ $endif$ $if(BytesRef)$ final var scratch = new BytesRef(); $endif$ - try ($Type$Vector.Builder builder = blockFactory.new$Type$VectorBuilder(positions.length)) { + try ($Type$Vector.Builder builder = blockFactory().new$Type$VectorBuilder(positions.length)) { for (int pos : positions) { $if(BytesRef)$ builder.append$Type$(values.get(pos, scratch)); @@ -129,7 +129,7 @@ $if(BytesRef)$ throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 5bf629cec61d3..6a231d9ff6bf3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -60,6 +60,7 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ @Override public $Type$Vector filter(int... 
positions) { + var blockFactory = blockFactory(); $if(boolean)$ final BitArray filtered = new BitArray(positions.length, blockFactory.bigArrays()); for (int i = 0; i < positions.length; i++) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index b80188cefba2e..f685d38d6459b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -108,6 +108,6 @@ $endif$ throw new IllegalStateException("can't release already released vector [" + this + "]"); } released = true; - blockFactory.adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsed(), true); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 89bc84d551b63..91b6bb0ffac87 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -91,4 +91,9 @@ $endif$ assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; Releasables.closeExpectNoException(vector); } + + @Override + public void allowPassingToDifferentDriver() { + vector.allowPassingToDifferentDriver(); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java index 1835bea60de24..98ba37e3f32d1 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java +++ 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AsyncOperator.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.core.Releasables; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.tasks.TaskCancelledException; @@ -78,7 +77,7 @@ public void addInput(Page input) { buffers.put(seqNo, output); onSeqNoCompleted(seqNo); }, e -> { - input.releaseBlocks(); + releasePageOnAnyThread(input); onFailure(e); onSeqNoCompleted(seqNo); }); @@ -91,6 +90,11 @@ public void addInput(Page input) { } } + private void releasePageOnAnyThread(Page page) { + page.allowPassingToDifferentDriver(); + page.releaseBlocks(); + } + /** * Performs an external computation and notify the listener when the result is ready. * @@ -157,7 +161,7 @@ private void discardPages() { Page page = buffers.remove(nextCheckpoint); checkpoint.markSeqNoAsPersisted(nextCheckpoint); if (page != null) { - Releasables.closeExpectNoException(page::releaseBlocks); + releasePageOnAnyThread(page); } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java index 6d5f914b74eb3..fd70a72cc3255 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OutputOperator.java @@ -62,7 +62,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pageConsumer.accept(mapper.apply(page)); } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java index 3d53a09856c1f..ee1e61d74bfc6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/PageConsumerOperator.java @@ -40,7 +40,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pageConsumer.accept(page); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java index 93c170cbcfc8a..0751abf4562a7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/SinkOperator.java @@ -23,6 +23,15 @@ public final Page getOutput() { throw new UnsupportedOperationException(); } + protected abstract void doAddInput(Page page); + + @Override + public final void addInput(Page page) { + // We need to change the ownership of the blocks of the input page before passing them to another driver. + page.allowPassingToDifferentDriver(); + doAddInput(page); + } + /** * A factory for creating sink operators. 
*/ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java index 0fb6ec6f63d96..fed0b2de4454b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperator.java @@ -73,10 +73,9 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { pagesAccepted++; - var newPage = transformer.apply(page); - sink.addPage(newPage); + sink.addPage(transformer.apply(page)); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index 88a584ac5ee44..27b0380ecfea0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -27,12 +27,16 @@ import java.util.BitSet; import java.util.List; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.sameInstance; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -568,6 +572,82 @@ public void testReleaseVector() { assertThat(breaker.getUsed(), equalTo(0L)); } + public void testParent() { + long overLimit = between(1, 10); + long maxOverLimit = 
randomLongBetween(overLimit, 1000); + LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(blockFactory.breaker(), overLimit, maxOverLimit); + BlockFactory childFactory = blockFactory.newChildFactory(localBreaker); + assertThat(childFactory.parent(), sameInstance(blockFactory)); + assertThat(blockFactory.parent(), sameInstance(blockFactory)); + localBreaker.close(); + } + + private Block randomBlock(BlockFactory blockFactory, int positionCount) { + return BasicBlockTests.randomBlock( + blockFactory, + randomFrom(ElementType.BYTES_REF, ElementType.LONG, ElementType.BOOLEAN), + positionCount, + randomBoolean(), + between(0, 1), + between(1, 3), + between(0, 1), + between(1, 3) + ).block(); + } + + public void testAllowPassingBlockToDifferentContext() throws Exception { + long overLimit1 = between(0, 10 * 1024); + long maxOverLimit1 = randomLongBetween(overLimit1, 100 * 1024); + LocalCircuitBreaker localBreaker1 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit1, maxOverLimit1); + long overLimit2 = between(0, 10 * 1024); + long maxOverLimit2 = randomLongBetween(overLimit1, 100 * 1024); + LocalCircuitBreaker localBreaker2 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit2, maxOverLimit2); + BlockFactory childFactory1 = blockFactory.newChildFactory(localBreaker1); + BlockFactory childFactory2 = blockFactory.newChildFactory(localBreaker2); + Thread[] releasingThreads = new Thread[between(1, 4)]; + Page[] passedPages = new Page[releasingThreads.length]; + for (int i = 0; i < passedPages.length; i++) { + int positionCount = between(1, 100); + Block[] blocks = new Block[between(1, 10)]; + for (int b = 0; b < blocks.length; b++) { + blocks[b] = randomBlock(randomFrom(childFactory1, childFactory2), positionCount); + blocks[b].allowPassingToDifferentDriver(); + assertThat(blocks[b].blockFactory(), equalTo(blockFactory)); + } + passedPages[i] = new Page(blocks); + } + Block[] localBlocks = new Block[between(1, 100)]; + for (int i = 0; i < 
localBlocks.length; i++) { + BlockFactory childFactory = randomFrom(childFactory1, childFactory2); + localBlocks[i] = randomBlock(childFactory, between(1, 100)); + assertThat(localBlocks[i].blockFactory(), equalTo(childFactory)); + } + CyclicBarrier barrier = new CyclicBarrier(releasingThreads.length + 1); + for (int i = 0; i < releasingThreads.length; i++) { + int threadIndex = i; + releasingThreads[threadIndex] = new Thread(() -> { + try { + barrier.await(30, TimeUnit.SECONDS); + passedPages[threadIndex].releaseBlocks(); + } catch (Exception e) { + throw new AssertionError(e); + } + }); + releasingThreads[threadIndex].start(); + } + barrier.await(30, TimeUnit.SECONDS); + for (Block block : localBlocks) { + block.close(); + } + for (Thread releasingThread : releasingThreads) { + releasingThread.join(); + } + assertThat(localBreaker1.getReservedBytes(), lessThanOrEqualTo(maxOverLimit1)); + assertThat(localBreaker2.getReservedBytes(), lessThanOrEqualTo(maxOverLimit2)); + localBreaker1.close(); + localBreaker2.close(); + } + static BytesRef randomBytesRef() { return new BytesRef(randomByteArrayOfLength(between(1, 20))); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java new file mode 100644 index 0000000000000..a95c78df83646 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LocalCircuitBreakerTests.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class LocalCircuitBreakerTests extends ESTestCase { + + static class TrackingCircuitBreaker implements CircuitBreaker { + private final CircuitBreaker breaker; + private final AtomicInteger called = new AtomicInteger(); + + TrackingCircuitBreaker(CircuitBreaker breaker) { + this.breaker = breaker; + } + + @Override + public void circuitBreak(String fieldName, long bytesNeeded) { + + } + + @Override + public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException { + called.incrementAndGet(); + breaker.addEstimateBytesAndMaybeBreak(bytes, label); + } + + @Override + public void addWithoutBreaking(long bytes) { + called.incrementAndGet(); + breaker.addWithoutBreaking(bytes); + } + + @Override + public long getUsed() { + return breaker.getUsed(); + } + + @Override + public long getLimit() { + return breaker.getLimit(); + } + + @Override + public double getOverhead() { + return breaker.getOverhead(); + } + + @Override + public long getTrippedCount() { + return breaker.getTrippedCount(); + } + + @Override + public String getName() { + return breaker.getName(); + } + + @Override + public Durability getDurability() { + return breaker.getDurability(); + } + + @Override + public void setLimitAndOverhead(long limit, double overhead) { + breaker.setLimitAndOverhead(limit, overhead); + } + + int callTimes() { + return called.get(); + } + } + + private TrackingCircuitBreaker newTestBreaker(long limit) { + var bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofBytes(limit)).withCircuitBreaking(); + return new TrackingCircuitBreaker(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST)); + } + + public void testBasic() { + TrackingCircuitBreaker breaker = newTestBreaker(120); + LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(breaker, 30, 50); + localBreaker.addEstimateBytesAndMaybeBreak(20, "test"); + assertThat(localBreaker.getReservedBytes(), equalTo(30L)); + assertThat(breaker.callTimes(), equalTo(1)); + assertThat(breaker.getUsed(), equalTo(50L)); + localBreaker.addWithoutBreaking(-5); + assertThat(breaker.getUsed(), equalTo(50L)); + assertThat(localBreaker.getReservedBytes(), equalTo(35L)); + localBreaker.addEstimateBytesAndMaybeBreak(25, "test"); + assertThat(breaker.getUsed(), equalTo(50L)); + assertThat(breaker.callTimes(), equalTo(1)); + assertThat(localBreaker.getReservedBytes(), equalTo(10L)); + var error = expectThrows(CircuitBreakingException.class, () -> localBreaker.addEstimateBytesAndMaybeBreak(60, "test")); + assertThat(error.getBytesWanted(), equalTo(80L)); + assertThat(breaker.callTimes(), equalTo(2)); + localBreaker.addEstimateBytesAndMaybeBreak(30, "test"); + assertThat(breaker.getUsed(), equalTo(100L)); + assertThat(localBreaker.getReservedBytes(), equalTo(30L)); + assertThat(breaker.callTimes(), equalTo(3)); + localBreaker.addWithoutBreaking(-40L); + assertThat(breaker.getUsed(), equalTo(80L)); + assertThat(localBreaker.getReservedBytes(), equalTo(50L)); + assertThat(breaker.callTimes(), equalTo(4)); + localBreaker.close(); + assertThat(breaker.getUsed(), equalTo(30L)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java index 8183a055f2998..35623b93357df 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java @@ -63,7 +63,19 @@ public void ensureAllBlocksAreReleased() { } public MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { - super(breaker, bigArrays); + this(breaker, bigArrays, null); + } + + protected MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { + super(breaker, bigArrays, parent); + } + + @Override + public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { + if (childBreaker.parentBreaker() != breaker()) { + throw new IllegalStateException("Different parent breaker"); + } + return new MockBlockFactory(childBreaker, bigArrays(), this); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 290a16f83ed38..8cd7116677fd0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -23,11 +23,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -69,7 +71,16 @@ public void shutdownThreadPool() 
{ } public void testBasic() { - DriverContext driverContext = driverContext(); + BlockFactory globalBlockFactory = blockFactory(); + LocalCircuitBreaker localBreaker = null; + final DriverContext driverContext; + if (randomBoolean()) { + localBreaker = new LocalCircuitBreaker(globalBlockFactory.breaker(), between(0, 1024), between(0, 4096)); + BlockFactory localFactory = new BlockFactory(localBreaker, globalBlockFactory.bigArrays()); + driverContext = new DriverContext(globalBlockFactory.bigArrays(), localFactory); + } else { + driverContext = new DriverContext(globalBlockFactory.bigArrays(), globalBlockFactory); + } int positions = randomIntBetween(0, 10_000); List ids = new ArrayList<>(positions); Map dict = new HashMap<>(); @@ -98,7 +109,7 @@ protected Page createPage(int positionOffset, int length) { }; int maxConcurrentRequests = randomIntBetween(1, 10); AsyncOperator asyncOperator = new AsyncOperator(driverContext, maxConcurrentRequests) { - final LookupService lookupService = new LookupService(threadPool, driverContext.blockFactory(), dict, maxConcurrentRequests); + final LookupService lookupService = new LookupService(threadPool, globalBlockFactory, dict, maxConcurrentRequests); @Override protected void performAsync(Page inputPage, ActionListener listener) { @@ -143,10 +154,12 @@ public void doClose() { Driver driver = new Driver(driverContext, sourceOperator, intermediateOperators, outputOperator, () -> assertFalse(it.hasNext())); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 10000), future); future.actionGet(); + Releasables.close(localBreaker); } public void testStatus() { - DriverContext driverContext = driverContext(); + BlockFactory blockFactory = blockFactory(); + DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); Map> handlers = new HashMap<>(); AsyncOperator operator = new AsyncOperator(driverContext, 2) { @Override @@ -195,7 +208,16 @@ protected void 
doClose() { } public void testFailure() throws Exception { - DriverContext driverContext = driverContext(); + BlockFactory globalBlockFactory = blockFactory(); + LocalCircuitBreaker localBreaker = null; + final DriverContext driverContext; + if (randomBoolean()) { + localBreaker = new LocalCircuitBreaker(globalBlockFactory.breaker(), between(0, 1024), between(0, 4096)); + BlockFactory localFactory = new BlockFactory(localBreaker, globalBlockFactory.bigArrays()); + driverContext = new DriverContext(globalBlockFactory.bigArrays(), localFactory); + } else { + driverContext = new DriverContext(globalBlockFactory.bigArrays(), globalBlockFactory); + } final SequenceLongBlockSourceOperator sourceOperator = new SequenceLongBlockSourceOperator( driverContext.blockFactory(), LongStream.range(0, 100 * 1024) @@ -213,7 +235,7 @@ protected void doRun() { throw new ElasticsearchException("simulated"); } int positionCount = inputPage.getBlock(0).getPositionCount(); - IntBlock block = driverContext.blockFactory().newConstantIntBlockWith(between(1, 100), positionCount); + IntBlock block = globalBlockFactory.newConstantIntBlockWith(between(1, 100), positionCount); listener.onResponse(inputPage.appendPage(new Page(block))); } }; @@ -232,7 +254,7 @@ protected void doClose() { }; SinkOperator outputOperator = new PageConsumerOperator(Page::releaseBlocks); PlainActionFuture future = new PlainActionFuture<>(); - Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, () -> {}); + Driver driver = new Driver(driverContext, sourceOperator, List.of(asyncOperator), outputOperator, localBreaker); Driver.start(threadPool.getThreadContext(), threadPool.executor(ESQL_TEST_EXECUTOR), driver, between(1, 1000), future); assertBusy(() -> assertTrue(future.isDone())); if (failed.get()) { @@ -290,13 +312,13 @@ protected void doRun() { } } - protected DriverContext driverContext() { + protected BlockFactory blockFactory() { BigArrays bigArrays = new 
MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); BlockFactory factory = new MockBlockFactory(breaker, bigArrays); blockFactories.add(factory); - return new DriverContext(bigArrays, factory); + return factory; } private final List breakers = new ArrayList<>(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index f44131c006b94..74e83017e03bf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -225,7 +225,7 @@ public boolean needsInput() { } @Override - public void addInput(Page page) { + protected void doAddInput(Page page) { try { assertFalse("already finished", finished); IntBlock block = page.getBlock(0); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 768353a1c8d35..9b5012e56a3ff 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -69,6 +70,16 @@ public List> getSettings() { ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueSeconds(5), Setting.Property.NodeScope + ), + Setting.byteSizeSetting( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, + ByteSizeValue.ofBytes(randomIntBetween(0, 4096)), + Setting.Property.NodeScope + ), + Setting.byteSizeSetting( + BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, + ByteSizeValue.ofBytes(randomIntBetween(0, 16 * 1024)), + Setting.Property.NodeScope ) ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java index 46aaa6fab16a5..daefa8899b443 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EnrichIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -100,6 +101,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from 
network can trip the circuit breaker .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) .build(); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 3e8ac6fc3d5fb..5e1c3128d4076 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -68,6 +69,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING.getDefault(Settings.EMPTY) ) .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 256))) + .put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(between(0, 1024))) // allow reading pages from network can trip the circuit breaker .put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) .build(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 0f8fd70c3016b..da305da3ea84d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -29,6 +29,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; @@ -114,6 +115,7 @@ public class EnrichLookupService { private final Executor executor; private final BigArrays bigArrays; private final BlockFactory blockFactory; + private final LocalCircuitBreaker.SizeSettings localBreakerSettings; public EnrichLookupService( ClusterService clusterService, @@ -128,6 +130,7 @@ public EnrichLookupService( this.executor = transportService.getThreadPool().executor(EsqlPlugin.ESQL_THREAD_POOL_NAME); this.bigArrays = bigArrays; this.blockFactory = blockFactory; + this.localBreakerSettings = new LocalCircuitBreaker.SizeSettings(clusterService.getSettings()); transportService.registerRequestHandler( LOOKUP_ACTION_NAME, this.executor, @@ -238,6 +241,7 @@ private void doLookup( ActionListener listener ) { Block inputBlock = inputPage.getBlock(0); + LocalCircuitBreaker localBreaker = null; try { if (inputBlock.areAllValuesNull()) { listener.onResponse(createNullResponse(inputPage.getPositionCount(), extractFields)); @@ -246,18 +250,23 @@ private void doLookup( ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); listener = ActionListener.runBefore(listener, searchContext::close); + localBreaker = new LocalCircuitBreaker( + blockFactory.breaker(), + localBreakerSettings.overReservedBytes(), + localBreakerSettings.maxOverReservedBytes() + ); + DriverContext driverContext = new 
DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); MappedFieldType fieldType = searchExecutionContext.getFieldType(matchField); final SourceOperator queryOperator = switch (matchType) { case "match", "range" -> { QueryList queryList = QueryList.termQueryList(fieldType, searchExecutionContext, inputBlock); - yield new EnrichQuerySourceOperator(blockFactory, queryList, searchExecutionContext.getIndexReader()); + yield new EnrichQuerySourceOperator(driverContext.blockFactory(), queryList, searchExecutionContext.getIndexReader()); } default -> throw new EsqlIllegalArgumentException("illegal match type " + matchType); }; List intermediateOperators = new ArrayList<>(extractFields.size() + 2); final ElementType[] mergingTypes = new ElementType[extractFields.size()]; - // load the fields List fields = new ArrayList<>(extractFields.size()); for (int i = 0; i < extractFields.size(); i++) { @@ -273,7 +282,7 @@ private void doLookup( } intermediateOperators.add( new ValuesSourceReaderOperator( - blockFactory, + driverContext.blockFactory(), fields, List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.searcher().getIndexReader(), () -> { throw new UnsupportedOperationException("can't load _source as part of enrich"); @@ -289,19 +298,26 @@ private void doLookup( // merging field-values by position final int[] mergingChannels = IntStream.range(0, extractFields.size()).map(i -> i + 1).toArray(); intermediateOperators.add( - new MergePositionsOperator(singleLeaf, inputPage.getPositionCount(), 0, mergingChannels, mergingTypes, blockFactory) + new MergePositionsOperator( + singleLeaf, + inputPage.getPositionCount(), + 0, + mergingChannels, + mergingTypes, + driverContext.blockFactory() + ) ); AtomicReference result = new AtomicReference<>(); OutputOperator outputOperator = new OutputOperator(List.of(), Function.identity(), result::set); Driver driver = new 
Driver( "enrich-lookup:" + sessionId, - new DriverContext(bigArrays, blockFactory), + driverContext, () -> lookupDescription(sessionId, shardId, matchType, matchField, extractFields, inputPage.getPositionCount()), queryOperator, intermediateOperators, outputOperator, Driver.DEFAULT_STATUS_INTERVAL, - searchContext + localBreaker ); task.addListener(() -> { String reason = Objects.requireNonNullElse(task.getReasonCancelled(), "task was cancelled"); @@ -309,6 +325,7 @@ private void doLookup( }); var threadContext = transportService.getThreadPool().getThreadContext(); + localBreaker = null; Driver.start(threadContext, executor, driver, Driver.DEFAULT_MAX_ITERATIONS, listener.map(ignored -> { Page out = result.get(); if (out == null) { @@ -318,6 +335,8 @@ private void doLookup( })); } catch (Exception e) { listener.onFailure(e); + } finally { + Releasables.close(localBreaker); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index c531fd01c2a40..e4e2402a9c7a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.planner; import org.apache.lucene.search.Query; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneCountOperator; import 
org.elasticsearch.compute.lucene.LuceneOperator; @@ -117,6 +119,7 @@ public class LocalExecutionPlanner { private final CancellableTask parentTask; private final BigArrays bigArrays; private final BlockFactory blockFactory; + private final Settings settings; private final EsqlConfiguration configuration; private final ExchangeSourceHandler exchangeSourceHandler; private final ExchangeSinkHandler exchangeSinkHandler; @@ -128,6 +131,7 @@ public LocalExecutionPlanner( CancellableTask parentTask, BigArrays bigArrays, BlockFactory blockFactory, + Settings settings, EsqlConfiguration configuration, ExchangeSourceHandler exchangeSourceHandler, ExchangeSinkHandler exchangeSinkHandler, @@ -138,6 +142,7 @@ public LocalExecutionPlanner( this.parentTask = parentTask; this.bigArrays = bigArrays; this.blockFactory = blockFactory; + this.settings = settings; this.exchangeSourceHandler = exchangeSourceHandler; this.exchangeSinkHandler = exchangeSinkHandler; this.enrichLookupService = enrichLookupService; @@ -154,7 +159,8 @@ public LocalExecutionPlan plan(PhysicalPlan node) { new Holder<>(DriverParallelism.SINGLE), configuration.pragmas(), bigArrays, - blockFactory + blockFactory, + settings ); // workaround for https://github.com/elastic/elasticsearch/issues/99782 @@ -165,9 +171,10 @@ public LocalExecutionPlan plan(PhysicalPlan node) { PhysicalOperation physicalOperation = plan(node, context); + final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( new DriverFactory( - new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, configuration.pragmas().statusInterval()), + new DriverSupplier(context.bigArrays, context.blockFactory, physicalOperation, statusInterval, settings), context.driverParallelism().get() ) ); @@ -691,7 +698,8 @@ public record LocalExecutionPlannerContext( Holder driverParallelism, QueryPragmas queryPragmas, BigArrays bigArrays, - BlockFactory blockFactory + BlockFactory blockFactory, + Settings 
settings ) { void addDriverFactory(DriverFactory driverFactory) { driverFactories.add(driverFactory); @@ -715,26 +723,44 @@ int pageSize(Integer estimatedRowSize) { } } - record DriverSupplier(BigArrays bigArrays, BlockFactory blockFactory, PhysicalOperation physicalOperation, TimeValue statusInterval) - implements - Function, - Describable { + record DriverSupplier( + BigArrays bigArrays, + BlockFactory blockFactory, + PhysicalOperation physicalOperation, + TimeValue statusInterval, + Settings settings + ) implements Function, Describable { @Override public Driver apply(String sessionId) { SourceOperator source = null; List operators = new ArrayList<>(); SinkOperator sink = null; boolean success = false; - var driverContext = new DriverContext(bigArrays, blockFactory); + var localBreakerSettings = new LocalCircuitBreaker.SizeSettings(settings); + final var localBreaker = new LocalCircuitBreaker( + blockFactory.breaker(), + localBreakerSettings.overReservedBytes(), + localBreakerSettings.maxOverReservedBytes() + ); + var driverContext = new DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker)); try { source = physicalOperation.source(driverContext); physicalOperation.operators(operators, driverContext); sink = physicalOperation.sink(driverContext); success = true; - return new Driver(sessionId, driverContext, physicalOperation::describe, source, operators, sink, statusInterval, () -> {}); + return new Driver( + sessionId, + driverContext, + physicalOperation::describe, + source, + operators, + sink, + statusInterval, + localBreaker + ); } finally { if (false == success) { - Releasables.close(source, () -> Releasables.close(operators), sink); + Releasables.close(source, () -> Releasables.close(operators), sink, localBreaker); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 
8d7024f7d889d..dd5ae00294ed0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.BigArrays; @@ -100,12 +101,14 @@ public record Result(List pages, List profiles) {} private final DriverTaskRunner driverRunner; private final ExchangeService exchangeService; private final EnrichLookupService enrichLookupService; + private final ClusterService clusterService; public ComputeService( SearchService searchService, TransportService transportService, ExchangeService exchangeService, EnrichLookupService enrichLookupService, + ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, BlockFactory blockFactory @@ -119,6 +122,7 @@ public ComputeService( this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); this.exchangeService = exchangeService; this.enrichLookupService = enrichLookupService; + this.clusterService = clusterService; } public void execute( @@ -278,6 +282,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, task, bigArrays, blockFactory, + clusterService.getSettings(), context.configuration, context.exchangeSource(), context.exchangeSink(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 780d812e2c23b..d272aba26e4e8 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -74,6 +74,7 @@ public TransportEsqlQueryAction( transportService, exchangeService, enrichLookupService, + clusterService, threadPool, bigArrays, blockFactory diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index dbb7c1f130a1b..17ed0c1223636 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -329,11 +329,14 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { String sessionId = "csv-test"; ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler(between(1, 64), threadPool.executor(ESQL_THREAD_POOL_NAME)); ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(between(1, 64), threadPool::relativeTimeInMillis); + Settings.Builder settings = Settings.builder(); + LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( sessionId, new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays), + randomNodeSettings(), configuration, exchangeSource, exchangeSink, @@ -408,6 +411,15 @@ protected void start(Driver driver, ActionListener driverListener) { } } + private Settings randomNodeSettings() { + Settings.Builder builder = Settings.builder(); + if (randomBoolean()) { + builder.put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_SIZE_SETTING, ByteSizeValue.ofBytes(randomIntBetween(0, 4096))); + builder.put(BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(randomIntBetween(0, 16 * 1024))); + } + return builder.build(); + } + private 
Throwable reworkException(Throwable th) { StackTraceElement[] stackTrace = th.getStackTrace(); StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1]; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index a01d82731bc94..24fcae0f6bbb0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -125,6 +125,7 @@ private LocalExecutionPlanner planner() throws IOException { null, BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance(), + Settings.EMPTY, config(), null, null, From 0e37a919b0fa1ac87139f92a2c65605677b121fa Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 5 Dec 2023 17:46:50 +0100 Subject: [PATCH 226/263] Mute NestedAggregatorTests (#102998) This suite can potentially fail in most of the tests after the lucene upgrade to lucene 9.9. It is just a test failure and we will be providing a fix soon, but in the meanwhile let's mute it. 
relates https://github.com/elastic/elasticsearch/issues/102974 --- .../aggregations/bucket/nested/NestedAggregatorTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 83a2e856a512e..82f4597252ac9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -105,6 +106,7 @@ * prefixed with the nested path: nestedPath + "." 
+ fieldName * */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class NestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -502,7 +504,6 @@ public void testNestedOrdering() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testNestedOrdering_random() throws IOException { int numBooks = randomIntBetween(32, 512); List> books = new ArrayList<>(); @@ -563,7 +564,6 @@ public void testNestedOrdering_random() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public void testPreGetChildLeafCollectors() throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { From c603996c74492c0542b46de11b309db9958e86f1 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 5 Dec 2023 18:01:17 +0100 Subject: [PATCH 227/263] [Connectors API] Connector Sync Job List Endpoint (#102986) Add connector sync job list endpoint --- .../api/connector_sync_job.list.json | 46 +++++ .../entsearch/470_connector_sync_job_list.yml | 175 +++++++++++++++++ .../xpack/application/EnterpriseSearch.java | 5 + .../connector/syncjob/ConnectorSyncJob.java | 2 +- .../syncjob/ConnectorSyncJobIndexService.java | 110 ++++++++++- .../action/ListConnectorSyncJobsAction.java | 172 ++++++++++++++++ .../RestListConnectorSyncJobsAction.java | 49 +++++ .../TransportListConnectorSyncJobsAction.java | 63 ++++++ .../ConnectorSyncJobIndexServiceTests.java | 184 ++++++++++++++++-- .../syncjob/ConnectorSyncJobTestUtils.java | 10 + ...cJobsActionRequestBWCSerializingTests.java | 58 ++++++ ...JobsActionResponseBWCSerializingTests.java | 53 +++++ .../ListConnectorSyncJobsActionTests.java | 25 +++ ...sportListConnectorSyncJobsActionTests.java | 74 +++++++ .../xpack/security/operator/Constants.java | 1 + 15 files changed, 1008 
insertions(+), 19 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json new file mode 100644 index 0000000000000..d09a720f748ec --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json @@ -0,0 +1,46 @@ +{ + "connector_sync_job.list": { + "documentation": { + "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "description": "Lists all connector sync jobs." 
+ }, + "stability": "experimental", + "visibility": "feature_flag", + "feature_flag": "es.connector_api_feature_flag_enabled", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_connector/_sync_job", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "from": { + "type": "int", + "default": 0, + "description": "Starting offset (default: 0)" + }, + "size": { + "type": "int", + "default": 100, + "description": "specifies a max number of results to get (default: 100)" + }, + "status": { + "type": "string", + "description": "Sync job status, which sync jobs are fetched for" + }, + "connector_id": { + "type": "string", + "description": "Id of the connector to fetch the sync jobs for" + } + } + } +} diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml new file mode 100644 index 0000000000000..e076b88528ad5 --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/470_connector_sync_job_list.yml @@ -0,0 +1,175 @@ +setup: + - skip: + version: " - 8.11.99" + reason: Introduced in 8.12.0 + - do: + connector.put: + connector_id: connector-one + body: + index_name: search-test + name: my-connector + language: de + is_native: false + service_type: super-connector + - do: + connector.put: + connector_id: connector-two + body: + index_name: search-test-two + name: my-connector-two + language: de + is_native: false + service_type: super-connector + + +--- +"List Connector Sync Jobs": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id 
} + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: { } + + - match: { count: 3 } + + # Ascending order by creation_date for results + - match: { results.0.id: $sync-job-one-id } + - match: { results.1.id: $sync-job-two-id } + - match: { results.2.id: $sync-job-three-id } + +--- +"List Connector Sync Jobs - with from": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: + from: 1 + + - match: { count: 3 } + + # Ascending order by creation_date for results + - match: { results.0.id: $sync-job-two-id } + - match: { results.1.id: $sync-job-three-id } + +--- +"List Connector Sync Jobs - with size": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-three-id } + - do: + connector_sync_job.list: + size: 1 + + - match: { count: 3 } + + - match: { results.0.id: $sync-job-one-id } + +--- +"List Connector Sync Jobs - Get pending jobs": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + 
body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.cancel: + connector_sync_job_id: $sync-job-two-id + - do: + connector_sync_job.list: + status: pending + - match: { count: 1 } + - match: { results.0.id: $sync-job-one-id } + +--- +"List Connector Sync Jobs - Get jobs for connector one": + - do: + connector_sync_job.post: + body: + id: connector-one + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-one-id } + - do: + connector_sync_job.post: + body: + id: connector-two + job_type: access_control + trigger_method: scheduled + - set: { id: sync-job-two-id } + - do: + connector_sync_job.list: + connector_id: connector-one + - match: { count: 1 } + - match: { results.0.id: $sync-job-one-id } + + +--- +"List Connector Sync Jobs - empty list": + - do: + connector_sync_job.list: { } + + - match: { count: 0 } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 2119d9d0a4c30..73025c0b23b56 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -85,11 +85,13 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.ListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestCancelConnectorSyncJobAction; 
import org.elasticsearch.xpack.application.connector.syncjob.action.RestCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestGetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.RestListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.RestUpdateConnectorSyncJobIngestionStatsAction; @@ -97,6 +99,7 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.TransportCheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportDeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportGetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.TransportListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportPostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.TransportUpdateConnectorSyncJobIngestionStatsAction; @@ -238,6 +241,7 @@ protected XPackLicenseState getLicenseState() { new ActionHandler<>(DeleteConnectorSyncJobAction.INSTANCE, TransportDeleteConnectorSyncJobAction.class), new ActionHandler<>(CheckInConnectorSyncJobAction.INSTANCE, TransportCheckInConnectorSyncJobAction.class), new ActionHandler<>(CancelConnectorSyncJobAction.INSTANCE, TransportCancelConnectorSyncJobAction.class), + new ActionHandler<>(ListConnectorSyncJobsAction.INSTANCE, 
TransportListConnectorSyncJobsAction.class), new ActionHandler<>(UpdateConnectorSyncJobErrorAction.INSTANCE, TransportUpdateConnectorSyncJobErrorAction.class), new ActionHandler<>( UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, @@ -314,6 +318,7 @@ public List getRestHandlers( new RestDeleteConnectorSyncJobAction(), new RestCancelConnectorSyncJobAction(), new RestCheckInConnectorSyncJobAction(), + new RestListConnectorSyncJobsAction(), new RestUpdateConnectorSyncJobErrorAction(), new RestUpdateConnectorSyncJobIngestionStatsAction() ) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index 211a423dab99e..f14d0fa52b1c7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -93,7 +93,7 @@ public class ConnectorSyncJob implements Writeable, ToXContentObject { static final ParseField STARTED_AT_FIELD = new ParseField("started_at"); - static final ParseField STATUS_FIELD = new ParseField("status"); + public static final ParseField STATUS_FIELD = new ParseField("status"); public static final ParseField TOTAL_DOCUMENT_COUNT_FIELD = new ParseField("total_document_count"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 2c9ac7c06b91c..326fdb0367e5c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -18,6 +18,8 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; @@ -26,6 +28,13 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.DocumentMissingException; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; @@ -40,11 +49,14 @@ import java.io.IOException; import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; +import java.util.stream.Stream; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ClientHelper.CONNECTORS_ORIGIN; @@ -252,11 +264,103 @@ public void cancelConnectorSyncJob(String connectorSyncJobId, ActionListener listener + ) { + try { + QueryBuilder query = buildListQuery(connectorId, syncStatus); + + final SearchSourceBuilder searchSource = new SearchSourceBuilder().from(from) + 
.size(size) + .query(query) + .fetchSource(true) + .sort(ConnectorSyncJob.CREATED_AT_FIELD.getPreferredName(), SortOrder.ASC); + + final SearchRequest searchRequest = new SearchRequest(CONNECTOR_SYNC_JOB_INDEX_NAME).source(searchSource); + + clientWithOrigin.search(searchRequest, new ActionListener<>() { + @Override + public void onResponse(SearchResponse searchResponse) { + try { + listener.onResponse(mapSearchResponseToConnectorSyncJobsList(searchResponse)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(new ConnectorSyncJobIndexService.ConnectorSyncJobsResult(Collections.emptyList(), 0L)); + return; + } + listener.onFailure(e); + } + }); + } catch (Exception e) { + listener.onFailure(e); + } + } + + private static QueryBuilder buildListQuery(String connectorId, ConnectorSyncStatus syncStatus) { + boolean usesFilter = Stream.of(connectorId, syncStatus).anyMatch(Objects::nonNull); + BoolQueryBuilder boolFilterQueryBuilder = new BoolQueryBuilder(); + + if (usesFilter) { + if (Objects.nonNull(connectorId)) { + TermQueryBuilder connectorIdQuery = new TermQueryBuilder( + ConnectorSyncJob.CONNECTOR_FIELD.getPreferredName() + "." + Connector.ID_FIELD.getPreferredName(), + connectorId + ); + boolFilterQueryBuilder.must().add(connectorIdQuery); + } + + if (Objects.nonNull(syncStatus)) { + TermQueryBuilder syncStatusQuery = new TermQueryBuilder(ConnectorSyncJob.STATUS_FIELD.getPreferredName(), syncStatus); + boolFilterQueryBuilder.must().add(syncStatusQuery); + } + } + + return usesFilter ? 
boolFilterQueryBuilder : new MatchAllQueryBuilder(); + } + + private ConnectorSyncJobsResult mapSearchResponseToConnectorSyncJobsList(SearchResponse searchResponse) { + final List connectorSyncJobs = Arrays.stream(searchResponse.getHits().getHits()) + .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) + .toList(); + + return new ConnectorSyncJobIndexService.ConnectorSyncJobsResult( + connectorSyncJobs, + (int) searchResponse.getHits().getTotalHits().value + ); + } + + private static ConnectorSyncJob hitToConnectorSyncJob(SearchHit searchHit) { + // TODO: don't return sensitive data from configuration inside connector in list endpoint + + return ConnectorSyncJob.fromXContentBytes(searchHit.getSourceRef(), XContentType.JSON); + } + + public record ConnectorSyncJobsResult(List connectorSyncJobs, long totalResults) {} + + /** + * Updates the ingestion stats of the {@link ConnectorSyncJob} in the underlying index. + * + * @param request Request containing the updates to the ingestion stats. + * @param listener The action listener to invoke on response/failure. + */ public void updateConnectorSyncJobIngestionStats( UpdateConnectorSyncJobIngestionStatsAction.Request request, ActionListener listener diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..0a22b6f938142 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsAction.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.action.util.QueryPage; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class ListConnectorSyncJobsAction extends ActionType { + + public static final ListConnectorSyncJobsAction INSTANCE = new ListConnectorSyncJobsAction(); + public static final String NAME = "cluster:admin/xpack/connector/sync_job/list"; + + public ListConnectorSyncJobsAction() { + super(NAME, ListConnectorSyncJobsAction.Response::new); + } + + public static class Request extends ActionRequest implements ToXContentObject { + public static final ParseField CONNECTOR_ID_FIELD = new ParseField("connector_id"); + private static final ParseField PAGE_PARAMS_FIELD = new ParseField("pageParams"); + private final PageParams pageParams; + private final String connectorId; + private final ConnectorSyncStatus connectorSyncStatus; + + public Request(StreamInput in) throws IOException { + 
super(in); + this.pageParams = new PageParams(in); + this.connectorId = in.readOptionalString(); + this.connectorSyncStatus = in.readOptionalEnum(ConnectorSyncStatus.class); + } + + public Request(PageParams pageParams, String connectorId, ConnectorSyncStatus connectorSyncStatus) { + this.pageParams = pageParams; + this.connectorId = connectorId; + this.connectorSyncStatus = connectorSyncStatus; + } + + public PageParams getPageParams() { + return pageParams; + } + + public String getConnectorId() { + return connectorId; + } + + public ConnectorSyncStatus getConnectorSyncStatus() { + return connectorSyncStatus; + } + + @Override + public ActionRequestValidationException validate() { + // Pagination validation is done as part of PageParams constructor + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + pageParams.writeTo(out); + out.writeOptionalString(connectorId); + out.writeOptionalEnum(connectorSyncStatus); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(pageParams, request.pageParams) + && Objects.equals(connectorId, request.connectorId) + && connectorSyncStatus == request.connectorSyncStatus; + } + + @Override + public int hashCode() { + return Objects.hash(pageParams, connectorId, connectorSyncStatus); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "list_connector_sync_jobs_request", + p -> new ListConnectorSyncJobsAction.Request( + (PageParams) p[0], + (String) p[1], + p[2] != null ? 
ConnectorSyncStatus.fromString((String) p[2]) : null + ) + ); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> PageParams.fromXContent(p), PAGE_PARAMS_FIELD); + PARSER.declareString(optionalConstructorArg(), CONNECTOR_ID_FIELD); + PARSER.declareString(optionalConstructorArg(), ConnectorSyncJob.STATUS_FIELD); + } + + public static ListConnectorSyncJobsAction.Request parse(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); + builder.field(CONNECTOR_ID_FIELD.getPreferredName(), connectorId); + builder.field(ConnectorSyncJob.STATUS_FIELD.getPreferredName(), connectorSyncStatus); + } + builder.endObject(); + return builder; + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + public static final ParseField RESULTS_FIELD = new ParseField("results"); + + final QueryPage queryPage; + + public Response(StreamInput in) throws IOException { + super(in); + this.queryPage = new QueryPage<>(in, ConnectorSyncJob::new); + } + + public Response(List items, Long totalResults) { + this.queryPage = new QueryPage<>(items, totalResults, RESULTS_FIELD); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryPage.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryPage.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(queryPage, response.queryPage); + } + + @Override + public int hashCode() { + return Objects.hash(queryPage); + } + } +} diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..ef8851636be1b --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestListConnectorSyncJobsAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; +import org.elasticsearch.xpack.core.action.util.PageParams; + +import java.io.IOException; +import java.util.List; + +public class RestListConnectorSyncJobsAction extends BaseRestHandler { + @Override + public String getName() { + return "connector_sync_jobs_list_action"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.GET, "/" + EnterpriseSearch.CONNECTOR_SYNC_JOB_API_ENDPOINT)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + int from = restRequest.paramAsInt("from", PageParams.DEFAULT_FROM); + int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE); + String connectorId = 
restRequest.param(ListConnectorSyncJobsAction.Request.CONNECTOR_ID_FIELD.getPreferredName()); + String statusString = restRequest.param(ConnectorSyncJob.STATUS_FIELD.getPreferredName()); + ConnectorSyncStatus status = statusString != null ? ConnectorSyncStatus.fromString(statusString) : null; + + ListConnectorSyncJobsAction.Request request = new ListConnectorSyncJobsAction.Request( + new PageParams(from, size), + connectorId, + status + ); + + return channel -> client.execute(ListConnectorSyncJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java new file mode 100644 index 0000000000000..4ba662f77f8fa --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsAction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; +import org.elasticsearch.xpack.core.action.util.PageParams; + +public class TransportListConnectorSyncJobsAction extends HandledTransportAction< + ListConnectorSyncJobsAction.Request, + ListConnectorSyncJobsAction.Response> { + protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; + + @Inject + public TransportListConnectorSyncJobsAction( + TransportService transportService, + ClusterService clusterService, + ActionFilters actionFilters, + Client client + ) { + super( + ListConnectorSyncJobsAction.NAME, + transportService, + actionFilters, + ListConnectorSyncJobsAction.Request::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client); + } + + @Override + protected void doExecute( + Task task, + ListConnectorSyncJobsAction.Request request, + ActionListener listener + ) { + final PageParams pageParams = request.getPageParams(); + final String connectorId = request.getConnectorId(); + final ConnectorSyncStatus syncStatus = request.getConnectorSyncStatus(); + + connectorSyncJobIndexService.listConnectorSyncJobs( + pageParams.getFrom(), + pageParams.getSize(), + connectorId, + syncStatus, + listener.map(r -> new 
ListConnectorSyncJobsAction.Response(r.connectorSyncJobs(), r.totalResults())) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java index ab16fb8a46eb0..85d8826b98683 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexServiceTests.java @@ -31,8 +31,10 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; import org.junit.Before; +import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -60,12 +62,21 @@ public class ConnectorSyncJobIndexServiceTests extends ESSingleNodeTestCase { private static final int ONE_SECOND_IN_MILLIS = 1000; private ConnectorSyncJobIndexService connectorSyncJobIndexService; - private Connector connector; + private Connector connectorOne; + private Connector connectorTwo; @Before public void setup() throws Exception { - connector = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + connectorOne = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + connectorTwo = ConnectorTestUtils.getRandomSyncJobConnectorInfo(); + createConnector(connectorOne); + createConnector(connectorTwo); + + this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); + } + + private void createConnector(Connector connector) throws IOException, InterruptedException, ExecutionException, TimeoutException { final IndexRequest indexRequest = new 
IndexRequest(ConnectorIndexService.CONNECTOR_INDEX_NAME).opType(DocWriteRequest.OpType.INDEX) .id(connector.getConnectorId()) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) @@ -74,13 +85,11 @@ public void setup() throws Exception { // wait 10 seconds for connector creation index.get(TIMEOUT_SECONDS, TimeUnit.SECONDS); - - this.connectorSyncJobIndexService = new ConnectorSyncJobIndexService(client()); } public void testCreateConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); ConnectorSyncJobType requestJobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod requestTriggerMethod = syncJobRequest.getTriggerMethod(); @@ -101,7 +110,7 @@ public void testCreateConnectorSyncJob() throws Exception { public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connector.getConnectorId(), + connectorOne.getConnectorId(), null, ConnectorSyncJobTriggerMethod.ON_DEMAND ); @@ -114,7 +123,7 @@ public void testCreateConnectorSyncJob_WithMissingJobType_ExpectDefaultJobTypeTo public void testCreateConnectorSyncJob_WithMissingTriggerMethod_ExpectDefaultTriggerMethodToBeSet() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = new PostConnectorSyncJobAction.Request( - connector.getConnectorId(), + connectorOne.getConnectorId(), ConnectorSyncJobType.FULL, null ); @@ -139,7 +148,7 @@ public void testCreateConnectorSyncJob_WithMissingConnectorId_ExpectException() public void testDeleteConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response 
response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -157,7 +166,7 @@ public void testDeleteConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testGetConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); ConnectorSyncJobType jobType = syncJobRequest.getJobType(); ConnectorSyncJobTriggerMethod triggerMethod = syncJobRequest.getTriggerMethod(); @@ -170,7 +179,7 @@ public void testGetConnectorSyncJob() throws Exception { assertThat(syncJob.getId(), equalTo(syncJobId)); assertThat(syncJob.getJobType(), equalTo(jobType)); assertThat(syncJob.getTriggerMethod(), equalTo(triggerMethod)); - assertThat(syncJob.getConnector().getConnectorId(), equalTo(connector.getConnectorId())); + assertThat(syncJob.getConnector().getConnectorId(), equalTo(connectorOne.getConnectorId())); } public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { @@ -179,7 +188,7 @@ public void testGetConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCheckInConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -218,7 +227,7 @@ public void testCheckInConnectorSyncJob_WithMissingSyncJobId_ExpectException() { public void testCancelConnectorSyncJob() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String 
syncJobId = response.getId(); @@ -254,9 +263,122 @@ public void testCancelConnectorSyncJob_WithMissingSyncJobId_ExpectException() { expectThrows(ResourceNotFoundException.class, () -> awaitCancelConnectorSyncJob(NON_EXISTING_SYNC_JOB_ID)); } + public void testListConnectorSyncJobs() throws Exception { + int numberOfSyncJobs = 5; + List syncJobs = new ArrayList<>(); + + for (int i = 0; i < numberOfSyncJobs; i++) { + PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOne.getConnectorId() + ); + PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(request); + ConnectorSyncJob syncJob = awaitGetConnectorSyncJob(response.getId()); + syncJobs.add(syncJob); + } + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult firstTwoSyncJobs = awaitListConnectorSyncJobs(0, 2, null, null); + ConnectorSyncJobIndexService.ConnectorSyncJobsResult nextTwoSyncJobs = awaitListConnectorSyncJobs(2, 2, null, null); + ConnectorSyncJobIndexService.ConnectorSyncJobsResult lastSyncJobs = awaitListConnectorSyncJobs(4, 100, null, null); + + ConnectorSyncJob firstSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(0); + ConnectorSyncJob secondSyncJob = firstTwoSyncJobs.connectorSyncJobs().get(1); + ConnectorSyncJob thirdSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(0); + ConnectorSyncJob fourthSyncJob = nextTwoSyncJobs.connectorSyncJobs().get(1); + ConnectorSyncJob fifthSyncJob = lastSyncJobs.connectorSyncJobs().get(0); + + assertThat(firstTwoSyncJobs.connectorSyncJobs().size(), equalTo(2)); + assertThat(firstTwoSyncJobs.totalResults(), equalTo(5L)); + + assertThat(nextTwoSyncJobs.connectorSyncJobs().size(), equalTo(2)); + assertThat(nextTwoSyncJobs.totalResults(), equalTo(5L)); + + assertThat(lastSyncJobs.connectorSyncJobs().size(), equalTo(1)); + assertThat(lastSyncJobs.totalResults(), equalTo(5L)); + + assertThat(firstSyncJob, equalTo(syncJobs.get(0))); + assertThat(secondSyncJob, 
equalTo(syncJobs.get(1))); + assertThat(thirdSyncJob, equalTo(syncJobs.get(2))); + assertThat(fourthSyncJob, equalTo(syncJobs.get(3))); + assertThat(fifthSyncJob, equalTo(syncJobs.get(4))); + + // assert ordering: ascending order by creation date + assertTrue(fifthSyncJob.getCreatedAt().isAfter(fourthSyncJob.getCreatedAt())); + assertTrue(fourthSyncJob.getCreatedAt().isAfter(thirdSyncJob.getCreatedAt())); + assertTrue(thirdSyncJob.getCreatedAt().isAfter(secondSyncJob.getCreatedAt())); + assertTrue(secondSyncJob.getCreatedAt().isAfter(firstSyncJob.getCreatedAt())); + } + + public void testListConnectorSyncJobs_WithStatusPending_GivenOnePendingTwoCancelled_ExpectOnePending() throws Exception { + String connectorId = connectorOne.getConnectorId(); + + PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + PostConnectorSyncJobAction.Request requestTwo = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + PostConnectorSyncJobAction.Request requestThree = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest(connectorId); + + PostConnectorSyncJobAction.Response responseOne = awaitPutConnectorSyncJob(requestOne); + PostConnectorSyncJobAction.Response responseTwo = awaitPutConnectorSyncJob(requestTwo); + PostConnectorSyncJobAction.Response responseThree = awaitPutConnectorSyncJob(requestThree); + + String syncJobOneId = responseOne.getId(); + String syncJobTwoId = responseTwo.getId(); + String syncJobThreeId = responseThree.getId(); + + // cancel sync job two and three -> one pending left + awaitCancelConnectorSyncJob(syncJobTwoId); + awaitCancelConnectorSyncJob(syncJobThreeId); + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult = awaitListConnectorSyncJobs( + 0, + 100, + null, + ConnectorSyncStatus.PENDING + ); + long numberOfResults = connectorSyncJobsResult.totalResults(); + String idOfReturnedSyncJob = 
connectorSyncJobsResult.connectorSyncJobs().get(0).getId(); + + assertThat(numberOfResults, equalTo(1L)); + assertThat(idOfReturnedSyncJob, equalTo(syncJobOneId)); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/enterprise-search-team/issues/6351") + public void testListConnectorSyncJobs_WithConnectorOneId_GivenTwoOverallOneFromConnectorOne_ExpectOne() throws Exception { + String connectorOneId = connectorOne.getConnectorId(); + String connectorTwoId = connectorTwo.getConnectorId(); + + PostConnectorSyncJobAction.Request requestOne = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorOneId + ); + PostConnectorSyncJobAction.Request requestTwo = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( + connectorTwoId + ); + + awaitPutConnectorSyncJob(requestOne); + awaitPutConnectorSyncJob(requestTwo); + + ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult = awaitListConnectorSyncJobs( + 0, + 100, + connectorOneId, + null + ); + + long numberOfResults = connectorSyncJobsResult.totalResults(); + String connectorIdOfReturnedSyncJob = connectorSyncJobsResult.connectorSyncJobs().get(0).getConnector().getConnectorId(); + + assertThat(numberOfResults, equalTo(1L)); + assertThat(connectorIdOfReturnedSyncJob, equalTo(connectorOneId)); + } + + public void testListConnectorSyncJobs_WithNoSyncJobs_ReturnEmptyResult() throws Exception { + ConnectorSyncJobIndexService.ConnectorSyncJobsResult firstOneHundredSyncJobs = awaitListConnectorSyncJobs(0, 100, null, null); + + assertThat(firstOneHundredSyncJobs.connectorSyncJobs().size(), equalTo(0)); + assertThat(firstOneHundredSyncJobs.totalResults(), equalTo(0L)); + } + public void testUpdateConnectorSyncJobError() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response 
response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -285,7 +407,7 @@ public void testUpdateConnectorSyncJobError_WithMissingSyncJobId_ExceptException public void testUpdateConnectorSyncJobIngestionStats() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -329,7 +451,7 @@ public void testUpdateConnectorSyncJobIngestionStats() throws Exception { public void testUpdateConnectorSyncJobIngestionStats_WithoutLastSeen_ExpectUpdateOfLastSeen() throws Exception { PostConnectorSyncJobAction.Request syncJobRequest = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest( - connector.getConnectorId() + connectorOne.getConnectorId() ); PostConnectorSyncJobAction.Response response = awaitPutConnectorSyncJob(syncJobRequest); String syncJobId = response.getId(); @@ -465,6 +587,38 @@ private static void assertFieldsDidNotUpdateExceptFieldList( } } + private ConnectorSyncJobIndexService.ConnectorSyncJobsResult awaitListConnectorSyncJobs( + int from, + int size, + String connectorId, + ConnectorSyncStatus syncStatus + ) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference result = new AtomicReference<>(null); + final AtomicReference exc = new AtomicReference<>(null); + + connectorSyncJobIndexService.listConnectorSyncJobs(from, size, connectorId, syncStatus, new ActionListener<>() { + @Override + public void onResponse(ConnectorSyncJobIndexService.ConnectorSyncJobsResult connectorSyncJobsResult) { + result.set(connectorSyncJobsResult); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exc.set(e); + latch.countDown(); + } + }); + + assertTrue("Timeout waiting for list request", 
latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)); + if (exc.get() != null) { + throw exc.get(); + } + assertNotNull("Received null response from list request", result.get()); + return result.get(); + } + private UpdateResponse awaitUpdateConnectorSyncJob(String syncJobId, String error) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java index fb412db168605..96a12c9efac51 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTestUtils.java @@ -13,9 +13,11 @@ import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.GetConnectorSyncJobAction; +import org.elasticsearch.xpack.application.connector.syncjob.action.ListConnectorSyncJobsAction; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobErrorAction; import org.elasticsearch.xpack.application.connector.syncjob.action.UpdateConnectorSyncJobIngestionStatsAction; +import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; import java.time.Instant; @@ -146,4 +148,12 @@ public static GetConnectorSyncJobAction.Request getRandomGetConnectorSyncJobRequ public static GetConnectorSyncJobAction.Response getRandomGetConnectorSyncJobResponse() { return 
new GetConnectorSyncJobAction.Response(getRandomConnectorSyncJob()); } + + public static ListConnectorSyncJobsAction.Request getRandomListConnectorSyncJobsActionRequest() { + return new ListConnectorSyncJobsAction.Request( + SearchApplicationTestUtils.randomPageParams(), + randomAlphaOfLength(10), + ConnectorTestUtils.getRandomSyncStatus() + ); + } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..52ad207d18ffd --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionRequestBWCSerializingTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; +import org.elasticsearch.xpack.application.connector.ConnectorTestUtils; +import org.elasticsearch.xpack.application.search.SearchApplicationTestUtils; +import org.elasticsearch.xpack.core.action.util.PageParams; +import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; + +import java.io.IOException; + +public class ListConnectorSyncJobsActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase< + ListConnectorSyncJobsAction.Request> { + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorSyncJobsAction.Request::new; + } + + @Override + protected ListConnectorSyncJobsAction.Request createTestInstance() { + PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); + String connectorId = randomAlphaOfLength(10); + ConnectorSyncStatus syncStatus = ConnectorTestUtils.getRandomSyncStatus(); + + return new ListConnectorSyncJobsAction.Request(pageParams, connectorId, syncStatus); + } + + @Override + protected ListConnectorSyncJobsAction.Request mutateInstance(ListConnectorSyncJobsAction.Request instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorSyncJobsAction.Request doParseInstance(XContentParser parser) throws IOException { + return ListConnectorSyncJobsAction.Request.parse(parser); + } + + @Override + protected ListConnectorSyncJobsAction.Request mutateInstanceForVersion( + ListConnectorSyncJobsAction.Request instance, + TransportVersion version + ) { + return new ListConnectorSyncJobsAction.Request( + instance.getPageParams(), + instance.getConnectorId(), + instance.getConnectorSyncStatus() + ); + } +} diff 
--git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..48a358ad043cd --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionResponseBWCSerializingTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.Connector; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; +import java.util.List; + +public class ListConnectorSyncJobsActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< + ListConnectorSyncJobsAction.Response> { + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(List.of(new NamedWriteableRegistry.Entry(Connector.class, Connector.NAME, Connector::new))); + } + + @Override + protected Writeable.Reader instanceReader() { + return ListConnectorSyncJobsAction.Response::new; + } + + @Override + protected ListConnectorSyncJobsAction.Response createTestInstance() { + return new ListConnectorSyncJobsAction.Response( + 
randomList(10, ConnectorSyncJobTestUtils::getRandomConnectorSyncJob), + randomLongBetween(0, 100) + ); + } + + @Override + protected ListConnectorSyncJobsAction.Response mutateInstance(ListConnectorSyncJobsAction.Response instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ListConnectorSyncJobsAction.Response mutateInstanceForVersion( + ListConnectorSyncJobsAction.Response instance, + TransportVersion version + ) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java new file mode 100644 index 0000000000000..39a7551bdfcab --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/ListConnectorSyncJobsActionTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; + +import static org.hamcrest.Matchers.nullValue; + +public class ListConnectorSyncJobsActionTests extends ESTestCase { + + public void testValidate_WhenPageParamsAreValid_ExpectNoValidationError() { + ListConnectorSyncJobsAction.Request request = ConnectorSyncJobTestUtils.getRandomListConnectorSyncJobsActionRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java new file mode 100644 index 0000000000000..503e7e54255e3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportListConnectorSyncJobsActionTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.syncjob.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportListConnectorSyncJobsActionTests extends ESSingleNodeTestCase { + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportListConnectorSyncJobsAction action; + + @Before + public void setup() { + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportListConnectorSyncJobsAction(transportService, clusterService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testListConnectorSyncJobs_ExpectNoWarnings() throws InterruptedException { + ListConnectorSyncJobsAction.Request request = ConnectorSyncJobTestUtils.getRandomListConnectorSyncJobsActionRequest(); + + executeRequest(request); + 
+ ensureNoWarnings(); + } + + private void executeRequest(ListConnectorSyncJobsAction.Request request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for list request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index ccefd8ab6bdb7..6e78eb2fb5b83 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -140,6 +140,7 @@ public class Constants { "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/delete", "cluster:admin/xpack/connector/sync_job/get", + "cluster:admin/xpack/connector/sync_job/list", "cluster:admin/xpack/connector/sync_job/post", "cluster:admin/xpack/connector/sync_job/update_error", "cluster:admin/xpack/connector/sync_job/update_stats", From 3a7417e3d93c0806a41031953fe0005fd530a627 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Dec 2023 18:39:32 +0100 Subject: [PATCH 228/263] Move a couple more ActionListener.wrap to ActionListener.delegateFailureAndWrap (#102965) It's in the title. Save a little more in allocations and make the behavior easier to reason about. 
--- .../reindex/TransportRethrottleAction.java | 6 +- .../TransportRethrottleActionTests.java | 3 + .../action/search/TransportSearchAction.java | 8 +- .../upgrades/SystemIndexMigrator.java | 83 +++++++++---------- .../xpack/ccr/action/CcrRequests.java | 14 ++-- .../AbstractTransportSetResetModeAction.java | 14 ++-- .../xpack/core/ilm/DownsampleStep.java | 2 +- .../SwapAliasesAndDeleteSourceIndexStep.java | 6 +- .../xpack/core/ilm/UpdateSettingsStep.java | 4 +- .../core/ml/annotations/AnnotationIndex.java | 15 ++-- .../persistence/AnomalyDetectorsIndex.java | 6 +- .../persistence/ElasticsearchMappings.java | 8 +- .../xpack/core/ml/utils/MlIndexAndAlias.java | 24 ++---- .../ml/utils/MlPlatformArchitecturesUtil.java | 12 +-- .../core/ml/utils/MlIndexAndAliasTests.java | 3 + .../TransformDeprecationChecker.java | 33 ++++---- .../TransportDeprecationInfoAction.java | 14 ++-- .../action/InternalExecutePolicyAction.java | 5 +- .../TransportDeleteEnrichPolicyAction.java | 28 ++++--- .../TransportPutEnrichPolicyAction.java | 8 +- .../syncjob/ConnectorSyncJobIndexService.java | 7 +- ...ortRenderSearchApplicationQueryAction.java | 6 +- .../xpack/esql/session/EsqlSession.java | 2 +- .../action/TransportDeleteSecretAction.java | 8 +- .../action/TransportGetSecretAction.java | 8 +- .../action/TransportPostSecretAction.java | 5 +- ...nsportDeleteSamlServiceProviderAction.java | 13 ++- .../ApplicationActionsResolver.java | 6 +- .../idp/privileges/UserPrivilegeResolver.java | 20 ++--- .../saml/authn/SamlAuthnRequestValidator.java | 30 ++++--- .../idp/saml/idp/SamlIdentityProvider.java | 10 +-- .../idp/saml/idp/SamlMetadataGenerator.java | 10 +-- .../services/elser/ElserMlNodeService.java | 10 ++- .../action/TransportGetPipelineAction.java | 9 +- .../action/TransportPutPipelineAction.java | 7 +- .../TransportDeleteCalendarEventAction.java | 2 +- ...ansportDeleteDataFrameAnalyticsAction.java | 11 ++- .../TransportDeleteExpiredDataAction.java | 9 +- 
.../ml/action/TransportDeleteJobAction.java | 8 +- .../TransportEvaluateDataFrameAction.java | 6 +- .../action/TransportGetDatafeedsAction.java | 2 +- .../action/TransportUpdateFilterAction.java | 8 +- 42 files changed, 242 insertions(+), 251 deletions(-) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java index bc89928358dc2..68e7d14038b67 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/TransportRethrottleAction.java @@ -101,10 +101,10 @@ private static void rethrottleParentTask( subRequest.setRequestsPerSecond(newRequestsPerSecond / runningSubtasks); subRequest.setTargetParentTaskId(new TaskId(localNodeId, task.getId())); logger.debug("rethrottling children of task [{}] to [{}] requests per second", task.getId(), subRequest.getRequestsPerSecond()); - client.execute(ReindexPlugin.RETHROTTLE_ACTION, subRequest, ActionListener.wrap(r -> { + client.execute(ReindexPlugin.RETHROTTLE_ACTION, subRequest, listener.delegateFailureAndWrap((l, r) -> { r.rethrowFailures("Rethrottle"); - listener.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); - }, listener::onFailure)); + l.onResponse(task.taskInfoGivenSubtaskInfo(localNodeId, r.getTasks())); + })); } else { logger.debug("children of task [{}] are already finished, nothing to rethrottle", task.getId()); listener.onResponse(task.taskInfo(localNodeId, true)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java index 80af095005c9d..ec8e0ce87ee56 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/reindex/TransportRethrottleActionTests.java @@ -34,9 +34,11 @@ import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.theInstance; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.any; import static org.mockito.Mockito.atMost; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportRethrottleActionTests extends ESTestCase { private int slices; @@ -65,6 +67,7 @@ private void rethrottleTestCase( float newRequestsPerSecond = randomValueOtherThanMany(f -> f <= 0, () -> randomFloat()); @SuppressWarnings("unchecked") ActionListener listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); TransportRethrottleAction.rethrottle(logger, localNodeId, client, task, newRequestsPerSecond, listener); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 1b3b321a530e6..f164e3342fb60 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -710,7 +710,7 @@ Map createFinalResponse() { remoteClusterService.maybeEnsureConnectedAndGetConnection( clusterAlias, skipUnavailable == false, - ActionListener.wrap(connection -> { + singleListener.delegateFailureAndWrap((delegate, connection) -> { final String[] indices = entry.getValue().indices(); final Executor responseExecutor = transportService.getThreadPool().executor(ThreadPool.Names.SEARCH_COORDINATION); // TODO: support point-in-time @@ -729,7 +729,7 @@ Map createFinalResponse() { TransportSearchShardsAction.TYPE.name(), searchShardsRequest, TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(singleListener, 
SearchShardsResponse::new, responseExecutor) + new ActionListenerResponseHandler<>(delegate, SearchShardsResponse::new, responseExecutor) ); } else { // does not do a can-match @@ -742,13 +742,13 @@ Map createFinalResponse() { searchShardsRequest, TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>( - singleListener.map(SearchShardsResponse::fromLegacyResponse), + delegate.map(SearchShardsResponse::fromLegacyResponse), ClusterSearchShardsResponse::new, responseExecutor ) ); } - }, singleListener::onFailure) + }) ); } } diff --git a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java index 728eae67f22cd..968e64fcc3888 100644 --- a/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java +++ b/server/src/main/java/org/elasticsearch/upgrades/SystemIndexMigrator.java @@ -30,10 +30,10 @@ import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.MetadataUpdateSettingsService; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -418,7 +418,7 @@ private void migrateSingleIndex(ClusterState clusterState, Consumer innerListener = ActionListener.wrap(listener::accept, this::markAsFailed); try { - createIndex(migrationInfo, ActionListener.wrap(shardsAcknowledgedResponse -> { + createIndex(migrationInfo, innerListener.delegateFailureAndWrap((delegate, shardsAcknowledgedResponse) -> { logger.debug( "while migrating [{}] , got create index response: [{}]", oldIndexName, @@ -427,45 +427,45 @@ private void 
migrateSingleIndex(ClusterState clusterState, Consumer reindex(migrationInfo, ActionListener.wrap(bulkByScrollResponse -> { - logger.debug( - "while migrating [{}], got reindex response: [{}]", - oldIndexName, - Strings.toString(bulkByScrollResponse) - ); - if ((bulkByScrollResponse.getBulkFailures() != null && bulkByScrollResponse.getBulkFailures().isEmpty() == false) - || (bulkByScrollResponse.getSearchFailures() != null - && bulkByScrollResponse.getSearchFailures().isEmpty() == false)) { - removeReadOnlyBlockOnReindexFailure( - oldIndex, - innerListener, - logAndThrowExceptionForFailures(bulkByScrollResponse) + delegate.delegateFailureAndWrap( + (delegate2, setReadOnlyResponse) -> reindex(migrationInfo, ActionListener.wrap(bulkByScrollResponse -> { + logger.debug( + "while migrating [{}], got reindex response: [{}]", + oldIndexName, + Strings.toString(bulkByScrollResponse) ); - } else { - // Successful completion of reindexing - remove read only and delete old index - setWriteBlock( - oldIndex, - false, - ActionListener.wrap( - setAliasAndRemoveOldIndex(migrationInfo, bulkByScrollResponse, innerListener), - innerListener::onFailure - ) + if ((bulkByScrollResponse.getBulkFailures() != null + && bulkByScrollResponse.getBulkFailures().isEmpty() == false) + || (bulkByScrollResponse.getSearchFailures() != null + && bulkByScrollResponse.getSearchFailures().isEmpty() == false)) { + removeReadOnlyBlockOnReindexFailure( + oldIndex, + delegate2, + logAndThrowExceptionForFailures(bulkByScrollResponse) + ); + } else { + // Successful completion of reindexing - remove read only and delete old index + setWriteBlock( + oldIndex, + false, + delegate2.delegateFailureAndWrap(setAliasAndRemoveOldIndex(migrationInfo, bulkByScrollResponse)) + ); + } + }, e -> { + logger.error( + () -> format( + "error occurred while reindexing index [%s] from feature [%s] to destination index [%s]", + oldIndexName, + migrationInfo.getFeatureName(), + newIndexName + ), + e ); - } - }, e -> { - 
logger.error( - () -> format( - "error occurred while reindexing index [%s] from feature [%s] to destination index [%s]", - oldIndexName, - migrationInfo.getFeatureName(), - newIndexName - ), - e - ); - removeReadOnlyBlockOnReindexFailure(oldIndex, innerListener, e); - })), innerListener::onFailure) + removeReadOnlyBlockOnReindexFailure(oldIndex, delegate2, e); + })) + ) ); - }, innerListener::onFailure)); + })); } catch (Exception ex) { logger.error( () -> format( @@ -501,10 +501,9 @@ private void createIndex(SystemIndexMigrationInfo migrationInfo, ActionListener< metadataCreateIndexService.createIndex(createRequest, listener); } - private CheckedConsumer setAliasAndRemoveOldIndex( + private CheckedBiConsumer, AcknowledgedResponse, Exception> setAliasAndRemoveOldIndex( SystemIndexMigrationInfo migrationInfo, - BulkByScrollResponse bulkByScrollResponse, - ActionListener listener + BulkByScrollResponse bulkByScrollResponse ) { final IndicesAliasesRequestBuilder aliasesRequest = migrationInfo.createClient(baseClient).admin().indices().prepareAliases(); aliasesRequest.removeIndex(migrationInfo.getCurrentIndexName()); @@ -526,7 +525,7 @@ private CheckedConsumer setAliasAndRemoveOldInd // Technically this callback might have a different cluster state, but it shouldn't matter - these indices shouldn't be changing // while we're trying to migrate them. 
- return unsetReadOnlyResponse -> aliasesRequest.execute( + return (listener, unsetReadOnlyResponse) -> aliasesRequest.execute( listener.delegateFailureAndWrap((l, deleteIndexResponse) -> l.onResponse(bulkByScrollResponse)) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java index d91d989068203..815a61297767a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CcrRequests.java @@ -64,11 +64,11 @@ public static void getIndexMetadata( if (metadataVersion > 0) { request.waitForMetadataVersion(metadataVersion).waitForTimeout(timeoutSupplier.get()); } - client.admin().cluster().state(request, ActionListener.wrap(response -> { + client.admin().cluster().state(request, listener.delegateFailureAndWrap((delegate, response) -> { if (response.getState() == null) { // timeout on wait_for_metadata_version assert metadataVersion > 0 : metadataVersion; if (timeoutSupplier.get().nanos() < 0) { - listener.onFailure( + delegate.onFailure( new IllegalStateException( "timeout to get cluster state with" + " metadata version [" @@ -79,25 +79,25 @@ public static void getIndexMetadata( ) ); } else { - getIndexMetadata(client, index, mappingVersion, metadataVersion, timeoutSupplier, listener); + getIndexMetadata(client, index, mappingVersion, metadataVersion, timeoutSupplier, delegate); } } else { final Metadata metadata = response.getState().metadata(); final IndexMetadata indexMetadata = metadata.getIndexSafe(index); if (indexMetadata.getMappingVersion() >= mappingVersion) { - listener.onResponse(indexMetadata); + delegate.onResponse(indexMetadata); return; } if (timeoutSupplier.get().nanos() < 0) { - listener.onFailure( + delegate.onFailure( new IllegalStateException("timeout to get cluster state with mapping version [" + mappingVersion 
+ "]") ); } else { // ask for the next version. - getIndexMetadata(client, index, mappingVersion, metadata.version() + 1, timeoutSupplier, listener); + getIndexMetadata(client, index, mappingVersion, metadata.version() + 1, timeoutSupplier, delegate); } } - }, listener::onFailure)); + })); } public static final RequestValidators.RequestValidator CCR_PUT_MAPPING_REQUEST_VALIDATOR = ( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java index 0d3c45ccedd3d..f94d7c6caae36 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportSetResetModeAction.java @@ -91,13 +91,15 @@ protected void masterOperation( listener.onFailure(e); }); - ActionListener clusterStateUpdateListener = ActionListener.wrap(acknowledgedResponse -> { - if (acknowledgedResponse.isAcknowledged() == false) { - wrappedListener.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); - return; + ActionListener clusterStateUpdateListener = wrappedListener.delegateFailureAndWrap( + (delegate, acknowledgedResponse) -> { + if (acknowledgedResponse.isAcknowledged() == false) { + delegate.onFailure(new ElasticsearchTimeoutException("Unknown error occurred while updating cluster state")); + return; + } + delegate.onResponse(acknowledgedResponse); } - wrappedListener.onResponse(acknowledgedResponse); - }, wrappedListener::onFailure); + ); submitUnbatchedTask(featureName() + "-set-reset-mode", new AckedClusterStateUpdateTask(request, clusterStateUpdateListener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java index 7cb36051b2cfc..bfbc32e11e93d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java @@ -85,7 +85,7 @@ public void performAction( return; } } - performDownsampleIndex(indexName, downsampleIndexName, ActionListener.wrap(listener::onResponse, listener::onFailure)); + performDownsampleIndex(indexName, downsampleIndexName, listener.delegateFailureAndWrap((l, r) -> l.onResponse(r))); } void performDownsampleIndex(String indexName, String downsampleIndexName, ActionListener listener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java index 9289ac79efbf5..82e4280dcc4cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStep.java @@ -136,12 +136,12 @@ static void deleteSourceIndexAndTransferAliases( ); }); - client.admin().indices().aliases(aliasesRequest, ActionListener.wrap(response -> { + client.admin().indices().aliases(aliasesRequest, listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { logger.warn("aliases swap from [{}] to [{}] response was not acknowledged", sourceIndexName, targetIndex); } - listener.onResponse(null); - }, listener::onFailure)); + l.onResponse(null); + })); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java index bcaf50ed930c4..5d5b04cf78815 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateSettingsStep.java @@ -45,9 +45,7 @@ public void performAction( UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexMetadata.getIndex().getName()).masterNodeTimeout( TimeValue.MAX_VALUE ).settings(settings); - getClient().admin() - .indices() - .updateSettings(updateSettingsRequest, ActionListener.wrap(response -> listener.onResponse(null), listener::onFailure)); + getClient().admin().indices().updateSettings(updateSettingsRequest, listener.delegateFailureAndWrap((l, r) -> l.onResponse(null))); } public Settings getSettings() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java index eb4f4986fa193..09c7348cdc870 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/annotations/AnnotationIndex.java @@ -71,7 +71,7 @@ public static void createAnnotationsIndexIfNecessaryAndWaitForYellow( final ActionListener finalListener ) { - final ActionListener annotationsIndexCreatedListener = ActionListener.wrap(success -> { + final ActionListener annotationsIndexCreatedListener = finalListener.delegateFailureAndWrap((delegate, success) -> { final ClusterHealthRequest request = new ClusterHealthRequest(READ_ALIAS_NAME).waitForYellowStatus() .masterNodeTimeout(masterNodeTimeout); executeAsyncWithOrigin( @@ -79,9 +79,9 @@ public static void createAnnotationsIndexIfNecessaryAndWaitForYellow( ML_ORIGIN, ClusterHealthAction.INSTANCE, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isTimedOut() == false), finalListener::onFailure) + delegate.delegateFailureAndWrap((l, r) -> 
l.onResponse(r.isTimedOut() == false)) ); - }, finalListener::onFailure); + }); createAnnotationsIndexIfNecessary(client, state, masterNodeTimeout, annotationsIndexCreatedListener); } @@ -97,17 +97,16 @@ public static void createAnnotationsIndexIfNecessary( final ActionListener finalListener ) { - final ActionListener checkMappingsListener = ActionListener.wrap( - success -> ElasticsearchMappings.addDocMappingIfMissing( + final ActionListener checkMappingsListener = finalListener.delegateFailureAndWrap( + (delegate, success) -> ElasticsearchMappings.addDocMappingIfMissing( WRITE_ALIAS_NAME, AnnotationIndex::annotationsMapping, client, state, masterNodeTimeout, - finalListener, + delegate, ANNOTATION_INDEX_MAPPINGS_VERSION - ), - finalListener::onFailure + ) ); final ActionListener createAliasListener = finalListener.delegateFailureAndWrap((finalDelegate, currentIndexName) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java index 37d070d90be76..2b622a1798508 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/AnomalyDetectorsIndex.java @@ -102,7 +102,7 @@ public static void createStateIndexAndAliasIfNecessaryAndWaitForYellow( TimeValue masterNodeTimeout, final ActionListener finalListener ) { - final ActionListener stateIndexAndAliasCreated = ActionListener.wrap(success -> { + final ActionListener stateIndexAndAliasCreated = finalListener.delegateFailureAndWrap((delegate, success) -> { final ClusterHealthRequest request = new ClusterHealthRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()) .waitForYellowStatus() .masterNodeTimeout(masterNodeTimeout); @@ -111,9 +111,9 @@ public static void 
createStateIndexAndAliasIfNecessaryAndWaitForYellow( ML_ORIGIN, ClusterHealthAction.INSTANCE, request, - ActionListener.wrap(r -> finalListener.onResponse(r.isTimedOut() == false), finalListener::onFailure) + delegate.delegateFailureAndWrap((l, r) -> l.onResponse(r.isTimedOut() == false)) ); - }, finalListener::onFailure); + }); MlIndexAndAlias.createIndexAndAliasIfNecessary( client, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java index 4187762ca58c6..088275ddabb3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappings.java @@ -185,11 +185,11 @@ protected void doRun() throws Exception { ML_ORIGIN, PutMappingAction.INSTANCE, putMappingRequest, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { if (response.isAcknowledged()) { - listener.onResponse(true); + delegate.onResponse(true); } else { - listener.onFailure( + delegate.onFailure( new ElasticsearchStatusException( "Attempt to put missing mapping in indices " + Arrays.toString(indicesThatRequireAnUpdate) @@ -198,7 +198,7 @@ protected void doRun() throws Exception { ) ); } - }, listener::onFailure) + }) ); } else { logger.trace("Mappings are up to date."); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 78a3493e8ae6b..d691cb0eb4c53 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -113,13 +113,13 @@ public 
static void createIndexAndAliasIfNecessary( }); // If both the index and alias were successfully created then wait for the shards of the index that the alias points to be ready - ActionListener indexCreatedListener = ActionListener.wrap(created -> { + ActionListener indexCreatedListener = loggingListener.delegateFailureAndWrap((delegate, created) -> { if (created) { - waitForShardsReady(client, alias, masterNodeTimeout, loggingListener); + waitForShardsReady(client, alias, masterNodeTimeout, delegate); } else { - loggingListener.onResponse(false); + delegate.onResponse(false); } - }, loggingListener::onFailure); + }); String legacyIndexWithoutSuffix = indexPatternPrefix; String indexPattern = indexPatternPrefix + "*"; @@ -218,10 +218,7 @@ public static void createSystemIndexIfNecessary( client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest, - ActionListener.wrap( - r -> indexCreatedListener.onResponse(r.isAcknowledged()), - indexCreatedListener::onFailure - ), + indexCreatedListener.delegateFailureAndWrap((l, r) -> l.onResponse(r.isAcknowledged())), client.admin().indices()::create ); } @@ -235,10 +232,7 @@ private static void waitForShardsReady(Client client, String index, TimeValue ma client.threadPool().getThreadContext(), ML_ORIGIN, healthRequest, - ActionListener.wrap( - response -> listener.onResponse(response.isTimedOut() == false), - listener::onFailure - ), + listener.delegateFailureAndWrap((l, response) -> l.onResponse(response.isTimedOut() == false)), client.admin().cluster()::health ); } @@ -371,12 +365,12 @@ public static void installIndexTemplateIfRequired( return; } - ActionListener innerListener = ActionListener.wrap(response -> { + ActionListener innerListener = listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { logger.warn("error adding template [{}], request was not acknowledged", templateRequest.name()); } - listener.onResponse(response.isAcknowledged()); - }, listener::onFailure); + 
l.onResponse(response.isAcknowledged()); + }); executeAsyncWithOrigin(client, ML_ORIGIN, PutComposableIndexTemplateAction.INSTANCE, templateRequest, innerListener); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java index 9802e06223332..c0f00cdada28f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlPlatformArchitecturesUtil.java @@ -48,9 +48,9 @@ static ActionListener getArchitecturesSetFromNodesInfoRespons ExecutorService executor, ActionListener> architecturesListener ) { - return ActionListener.wrap(nodesInfoResponse -> { - executor.execute(() -> { architecturesListener.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse)); }); - }, architecturesListener::onFailure); + return architecturesListener.delegateFailureAndWrap( + (l, nodesInfoResponse) -> executor.execute(() -> l.onResponse(getArchitecturesSetFromNodesInfoResponse(nodesInfoResponse))) + ); } static NodesInfoRequestBuilder getNodesInfoBuilderWithMlNodeArchitectureInfo(Client client) { @@ -77,10 +77,10 @@ public static void verifyMlNodesAndModelArchitectures( String modelID = configToReturn.getModelId(); String modelPlatformArchitecture = configToReturn.getPlatformArchitecture(); - ActionListener> architecturesListener = ActionListener.wrap((architectures) -> { + ActionListener> architecturesListener = successOrFailureListener.delegateFailureAndWrap((l, architectures) -> { verifyMlNodesAndModelArchitectures(architectures, modelPlatformArchitecture, modelID); - successOrFailureListener.onResponse(configToReturn); - }, successOrFailureListener::onFailure); + l.onResponse(configToReturn); + }); getMlNodesArchitecturesSet(architecturesListener, client, executor); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index cbe0ba99e57ce..db18752cb91b7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -127,6 +127,7 @@ public void setUpMocks() { ); listener = mock(ActionListener.class); + when(listener.delegateFailureAndWrap(any())).thenCallRealMethod(); createRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); aliasesRequestCaptor = ArgumentCaptor.forClass(IndicesAliasesRequest.class); @@ -171,6 +172,7 @@ public void testInstallIndexTemplateIfRequired_GivenLegacyTemplateExistsAndModer listener ); InOrder inOrder = inOrder(client, listener); + inOrder.verify(listener).delegateFailureAndWrap(any()); inOrder.verify(client).execute(same(PutComposableIndexTemplateAction.INSTANCE), any(), any()); inOrder.verify(listener).onResponse(true); } @@ -236,6 +238,7 @@ public void testInstallIndexTemplateIfRequired() { listener ); InOrder inOrder = inOrder(client, listener); + inOrder.verify(listener).delegateFailureAndWrap(any()); inOrder.verify(client).execute(same(PutComposableIndexTemplateAction.INSTANCE), any(), any()); inOrder.verify(listener).onResponse(true); } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java index 45384afbec59e..0b5eb7ada7655 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransformDeprecationChecker.java @@ -33,9 +33,12 @@ public void check(Components 
components, ActionListener deprecation PageParams startPage = new PageParams(0, PageParams.DEFAULT_SIZE); List issues = new ArrayList<>(); - recursiveGetTransformsAndCollectDeprecations(components, issues, startPage, ActionListener.wrap(allIssues -> { - deprecationIssueListener.onResponse(new CheckResult(getName(), allIssues)); - }, deprecationIssueListener::onFailure)); + recursiveGetTransformsAndCollectDeprecations( + components, + issues, + startPage, + deprecationIssueListener.delegateFailureAndWrap((l, allIssues) -> l.onResponse(new CheckResult(getName(), allIssues))) + ); } @Override @@ -53,17 +56,17 @@ private static void recursiveGetTransformsAndCollectDeprecations( request.setPageParams(page); request.setAllowNoResources(true); - components.client().execute(GetTransformAction.INSTANCE, request, ActionListener.wrap(getTransformResponse -> { - for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { - issues.addAll(config.checkForDeprecations(components.xContentRegistry())); - } - if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { - PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); - recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, listener); - } else { - listener.onResponse(issues); - } - - }, listener::onFailure)); + components.client() + .execute(GetTransformAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, getTransformResponse) -> { + for (TransformConfig config : getTransformResponse.getTransformConfigurations()) { + issues.addAll(config.checkForDeprecations(components.xContentRegistry())); + } + if (getTransformResponse.getTransformConfigurationCount() >= (page.getFrom() + page.getSize())) { + PageParams nextPage = new PageParams(page.getFrom() + page.getSize(), PageParams.DEFAULT_SIZE); + recursiveGetTransformsAndCollectDeprecations(components, issues, nextPage, delegate); + } else { + 
delegate.onResponse(issues); + } + })); } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java index 9aff1c010cac7..3c16830c2ba97 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/TransportDeprecationInfoAction.java @@ -105,7 +105,7 @@ protected final void masterOperation( ClientHelper.DEPRECATION_ORIGIN, NodesDeprecationCheckAction.INSTANCE, nodeDepReq, - ActionListener.wrap(response -> { + listener.delegateFailureAndWrap((delegate, response) -> { if (response.hasFailures()) { List failedNodeIds = response.failures() .stream() @@ -127,7 +127,7 @@ protected final void masterOperation( components, new ThreadedActionListener<>( client.threadPool().generic(), - listener.map( + delegate.map( deprecationIssues -> DeprecationInfoAction.Response.from( state, indexNameExpressionResolver, @@ -141,8 +141,7 @@ protected final void masterOperation( ) ) ); - - }, listener::onFailure) + }) ); } @@ -158,14 +157,13 @@ static void pluginSettingIssues( } GroupedActionListener groupedActionListener = new GroupedActionListener<>( enabledCheckers.size(), - ActionListener.wrap( - checkResults -> listener.onResponse( + listener.delegateFailureAndWrap( + (l, checkResults) -> l.onResponse( checkResults.stream() .collect( Collectors.toMap(DeprecationChecker.CheckResult::getCheckerName, DeprecationChecker.CheckResult::getIssues) ) - ), - listener::onFailure + ) ) ); for (DeprecationChecker checker : checkers) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java index 
ff2759244a2f6..e606f6ac8ea9c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/InternalExecutePolicyAction.java @@ -168,10 +168,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, try { ActionListener listener; if (request.isWaitForCompletion()) { - listener = ActionListener.wrap( - result -> actionListener.onResponse(new Response(result)), - actionListener::onFailure - ); + listener = actionListener.delegateFailureAndWrap((l, result) -> l.onResponse(new Response(result))); } else { listener = ActionListener.wrap( result -> LOGGER.debug("successfully executed policy [{}]", request.getName()), diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java index bf1327eb8efbe..5dec35149dc52 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportDeleteEnrichPolicyAction.java @@ -149,19 +149,21 @@ private void deleteIndicesAndPolicy(String[] indices, String name, ActionListene // as the setting 'action.destructive_requires_name' may be set to true DeleteIndexRequest deleteRequest = new DeleteIndexRequest().indices(indices).indicesOptions(LENIENT_OPTIONS); - new OriginSettingClient(client, ENRICH_ORIGIN).admin().indices().delete(deleteRequest, ActionListener.wrap((response) -> { - if (response.isAcknowledged() == false) { - listener.onFailure( - new ElasticsearchStatusException( - "Could not fetch indices to delete during policy delete of [{}]", - RestStatus.INTERNAL_SERVER_ERROR, - name - ) - ); - } else { - deletePolicy(name, listener); - } - }, 
listener::onFailure)); + new OriginSettingClient(client, ENRICH_ORIGIN).admin() + .indices() + .delete(deleteRequest, listener.delegateFailureAndWrap((delegate, response) -> { + if (response.isAcknowledged() == false) { + delegate.onFailure( + new ElasticsearchStatusException( + "Could not fetch indices to delete during policy delete of [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + name + ) + ); + } else { + deletePolicy(name, delegate); + } + })); } private void deletePolicy(String name, ActionListener listener) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java index 2cfc1dc8fffa0..7433863fcbd5d 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/TransportPutEnrichPolicyAction.java @@ -88,11 +88,11 @@ protected void masterOperation( privRequest.clusterPrivileges(Strings.EMPTY_ARRAY); privRequest.indexPrivileges(privileges); - ActionListener wrappedListener = ActionListener.wrap(r -> { + ActionListener wrappedListener = listener.delegateFailureAndWrap((delegate, r) -> { if (r.isCompleteMatch()) { - putPolicy(request, listener); + putPolicy(request, delegate); } else { - listener.onFailure( + delegate.onFailure( Exceptions.authorizationError( "unable to store policy because no indices match with the " + "specified index patterns {}", request.getPolicy().getIndices(), @@ -100,7 +100,7 @@ protected void masterOperation( ) ); } - }, listener::onFailure); + }); client.execute(HasPrivilegesAction.INSTANCE, privRequest, wrappedListener); } else { putPolicy(request, listener); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 326fdb0367e5c..a7d20414d4631 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -120,13 +120,12 @@ public void createConnectorSyncJob( clientWithOrigin.index( indexRequest, - ActionListener.wrap( - indexResponse -> listener.onResponse(new PostConnectorSyncJobAction.Response(indexResponse.getId())), - listener::onFailure + l.delegateFailureAndWrap( + (ll, indexResponse) -> ll.onResponse(new PostConnectorSyncJobAction.Response(indexResponse.getId())) ) ); } catch (IOException e) { - listener.onFailure(e); + l.onFailure(e); } })); } catch (Exception e) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java index 6f20bd4acb785..4a028a5558e87 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/TransportRenderSearchApplicationQueryAction.java @@ -66,10 +66,10 @@ protected void doExecute( SearchApplicationSearchRequest request, ActionListener listener ) { - systemIndexService.getSearchApplication(request.name(), ActionListener.wrap(searchApplication -> { + systemIndexService.getSearchApplication(request.name(), listener.delegateFailureAndWrap((delegate, searchApplication) -> { final Map renderedMetadata = templateService.renderTemplate(searchApplication, request.queryParams()); final 
SearchSourceBuilder sourceBuilder = templateService.renderQuery(searchApplication, renderedMetadata); - listener.onResponse(new RenderSearchApplicationQueryAction.Response(request.name(), sourceBuilder)); - }, listener::onFailure)); + delegate.onResponse(new RenderSearchApplicationQueryAction.Response(request.name(), sourceBuilder)); + })); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 7adcb0a1f9623..284c78c6e0121 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -167,7 +167,7 @@ private void preAnalyze(LogicalPlan parsed, BiFunction l.onResponse(action.apply(indexResolution, resolution)), listener::onFailure), + l.delegateFailureAndWrap((ll, indexResolution) -> ll.onResponse(action.apply(indexResolution, resolution))), matchFields ); }); diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java index b45d89f788b53..520efe269eb96 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportDeleteSecretAction.java @@ -34,12 +34,12 @@ public TransportDeleteSecretAction(TransportService transportService, ActionFilt @Override protected void doExecute(Task task, DeleteSecretRequest request, ActionListener listener) { - client.prepareDelete(FLEET_SECRETS_INDEX_NAME, request.id()).execute(ActionListener.wrap(deleteResponse -> { + client.prepareDelete(FLEET_SECRETS_INDEX_NAME, request.id()).execute(listener.delegateFailureAndWrap((delegate, deleteResponse) -> { if 
(deleteResponse.getResult() == Result.NOT_FOUND) { - listener.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); return; } - listener.onResponse(new DeleteSecretResponse(deleteResponse.getResult() == Result.DELETED)); - }, listener::onFailure)); + delegate.onResponse(new DeleteSecretResponse(deleteResponse.getResult() == Result.DELETED)); + })); } } diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java index f1e6f5a4ed864..4c8311924ab4b 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportGetSecretAction.java @@ -31,12 +31,12 @@ public TransportGetSecretAction(TransportService transportService, ActionFilters } protected void doExecute(Task task, GetSecretRequest request, ActionListener listener) { - client.prepareGet(FLEET_SECRETS_INDEX_NAME, request.id()).execute(ActionListener.wrap(getResponse -> { + client.prepareGet(FLEET_SECRETS_INDEX_NAME, request.id()).execute(listener.delegateFailureAndWrap((delegate, getResponse) -> { if (getResponse.isSourceEmpty()) { - listener.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); + delegate.onFailure(new ResourceNotFoundException("No secret with id [" + request.id() + "]")); return; } - listener.onResponse(new GetSecretResponse(getResponse.getId(), getResponse.getSource().get("value").toString())); - }, listener::onFailure)); + delegate.onResponse(new GetSecretResponse(getResponse.getId(), getResponse.getSource().get("value").toString())); + })); } } diff --git 
a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java index 2c3d6f7d98dc2..c87c4b58559ea 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/TransportPostSecretAction.java @@ -36,10 +36,7 @@ protected void doExecute(Task task, PostSecretRequest request, ActionListener listener.onResponse(new PostSecretResponse(indexResponse.getId())), - listener::onFailure - ) + listener.delegateFailureAndWrap((l, indexResponse) -> l.onResponse(new PostSecretResponse(indexResponse.getId()))) ); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java index d86268db44f64..fa0c510fde5c0 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/action/TransportDeleteSamlServiceProviderAction.java @@ -54,9 +54,9 @@ protected void doExecute( final ActionListener listener ) { final String entityId = request.getEntityId(); - index.findByEntityId(entityId, ActionListener.wrap(matchingDocuments -> { + index.findByEntityId(entityId, listener.delegateFailureAndWrap((delegate, matchingDocuments) -> { if (matchingDocuments.isEmpty()) { - listener.onResponse(new DeleteSamlServiceProviderResponse(null, entityId)); + delegate.onResponse(new DeleteSamlServiceProviderResponse(null, entityId)); } else if (matchingDocuments.size() == 1) { final SamlServiceProviderIndex.DocumentSupplier docInfo = 
Iterables.get(matchingDocuments, 0); final SamlServiceProviderDocument existingDoc = docInfo.getDocument(); @@ -66,9 +66,8 @@ protected void doExecute( index.deleteDocument( docInfo.version, request.getRefreshPolicy(), - ActionListener.wrap( - deleteResponse -> listener.onResponse(new DeleteSamlServiceProviderResponse(deleteResponse, entityId)), - listener::onFailure + delegate.delegateFailureAndWrap( + (l, deleteResponse) -> l.onResponse(new DeleteSamlServiceProviderResponse(deleteResponse, entityId)) ) ); } else { @@ -78,8 +77,8 @@ protected void doExecute( entityId, matchingDocuments.stream().map(d -> d.getDocument().docId).collect(Collectors.joining(",")) ); - listener.onFailure(new IllegalStateException("Multiple service providers exist with entity id [" + entityId + "]")); + delegate.onFailure(new IllegalStateException("Multiple service providers exist with entity id [" + entityId + "]")); } - }, listener::onFailure)); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java index 09635557d0e0f..5dbba28a1e6fd 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/ApplicationActionsResolver.java @@ -134,14 +134,14 @@ public void getActions(String application, ActionListener> listener) private void loadActions(String applicationName, ActionListener> listener) { final GetPrivilegesRequest request = new GetPrivilegesRequest(); request.application(applicationName); - this.client.execute(GetPrivilegesAction.INSTANCE, request, ActionListener.wrap(response -> { + this.client.execute(GetPrivilegesAction.INSTANCE, request, listener.delegateFailureAndWrap((delegate, response) -> { final Set fixedActions = 
Stream.of(response.privileges()) .map(p -> p.getActions()) .flatMap(Collection::stream) .filter(s -> s.indexOf('*') == -1) .collect(Collectors.toUnmodifiableSet()); cache.put(applicationName, fixedActions); - listener.onResponse(fixedActions); - }, listener::onFailure)); + delegate.onResponse(fixedActions); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java index eeb5e91f29ced..c4ffe65feae5a 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/privileges/UserPrivilegeResolver.java @@ -78,10 +78,10 @@ public UserPrivilegeResolver(Client client, SecurityContext securityContext, App * Requires that the active user is set in the {@link org.elasticsearch.xpack.core.security.SecurityContext}. 
*/ public void resolve(ServiceProviderPrivileges service, ActionListener listener) { - buildResourcePrivilege(service, ActionListener.wrap(resourcePrivilege -> { + buildResourcePrivilege(service, listener.delegateFailureAndWrap((delegate, resourcePrivilege) -> { final String username = securityContext.requireUser().principal(); if (resourcePrivilege == null) { - listener.onResponse(UserPrivileges.noAccess(username)); + delegate.onResponse(UserPrivileges.noAccess(username)); return; } HasPrivilegesRequest request = new HasPrivilegesRequest(); @@ -89,7 +89,7 @@ public void resolve(ServiceProviderPrivileges service, ActionListener { + client.execute(HasPrivilegesAction.INSTANCE, request, delegate.delegateFailureAndWrap((l, response) -> { logger.debug( "Checking access for user [{}] to application [{}] resource [{}]", username, @@ -98,9 +98,9 @@ public void resolve(ServiceProviderPrivileges service, ActionListener listener ) { - actionsResolver.getActions(service.getApplicationName(), ActionListener.wrap(actions -> { + actionsResolver.getActions(service.getApplicationName(), listener.delegateFailureAndWrap((delegate, actions) -> { if (actions == null || actions.isEmpty()) { logger.warn("No application-privilege actions defined for application [{}]", service.getApplicationName()); - listener.onResponse(null); + delegate.onResponse(null); } else { logger.debug("Using actions [{}] for application [{}]", actions, service.getApplicationName()); final RoleDescriptor.ApplicationResourcePrivileges.Builder builder = RoleDescriptor.ApplicationResourcePrivileges.builder(); builder.application(service.getApplicationName()); builder.resources(service.getResource()); builder.privileges(actions); - listener.onResponse(builder.build()); + delegate.onResponse(builder.build()); } - }, listener::onFailure)); + })); } } diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java 
b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java index 0313040b7e8ae..9fc9f4a28d250 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/authn/SamlAuthnRequestValidator.java @@ -101,16 +101,20 @@ public void processQueryString(String queryString, ActionListener { - try { - validateAuthnRequest(authnRequest, sp, parsedQueryString, listener); - } catch (ElasticsearchSecurityException e) { - logger.debug("Could not validate AuthnRequest", e); - listener.onFailure(e); - } catch (Exception e) { - logAndRespond("Could not validate AuthnRequest", e, listener); - } - }, listener::onFailure)); + getSpFromAuthnRequest( + authnRequest.getIssuer(), + authnRequest.getAssertionConsumerServiceURL(), + listener.delegateFailureAndWrap((l, sp) -> { + try { + validateAuthnRequest(authnRequest, sp, parsedQueryString, l); + } catch (ElasticsearchSecurityException e) { + logger.debug("Could not validate AuthnRequest", e); + l.onFailure(e); + } catch (Exception e) { + logAndRespond("Could not validate AuthnRequest", e, l); + } + }) + ); } catch (ElasticsearchSecurityException e) { logger.debug("Could not process AuthnRequest", e); listener.onFailure(e); @@ -277,7 +281,7 @@ private void getSpFromAuthnRequest(Issuer issuer, String acs, ActionListener { + idp.resolveServiceProvider(issuerString, acs, false, listener.delegateFailureAndWrap((delegate, serviceProvider) -> { if (null == serviceProvider) { throw new ElasticsearchSecurityException( "Service Provider with Entity ID [{}] and ACS [{}] is not known to this Identity Provider", @@ -286,8 +290,8 @@ private void getSpFromAuthnRequest(Issuer issuer, String acs, ActionListener listener ) { - serviceProviderResolver.resolve(spEntityId, ActionListener.wrap(sp -> { + serviceProviderResolver.resolve(spEntityId, 
listener.delegateFailureAndWrap((delegate, sp) -> { if (sp == null) { logger.debug("No explicitly registered service provider exists for entityId [{}]", spEntityId); - resolveWildcardService(spEntityId, acs, listener); + resolveWildcardService(spEntityId, acs, delegate); } else if (allowDisabled == false && sp.isEnabled() == false) { logger.info("Service provider [{}][{}] is not enabled", spEntityId, sp.getName()); - listener.onResponse(null); + delegate.onResponse(null); } else { logger.debug("Service provider for [{}] is [{}]", spEntityId, sp); - listener.onResponse(sp); + delegate.onResponse(sp); } - }, listener::onFailure)); + })); } private void resolveWildcardService(String spEntityId, String acs, ActionListener listener) { diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java index dc4a9edbf22f4..13b2c461a6623 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/idp/SamlMetadataGenerator.java @@ -43,10 +43,10 @@ public SamlMetadataGenerator(SamlFactory samlFactory, SamlIdentityProvider idp) } public void generateMetadata(String spEntityId, String acs, ActionListener listener) { - idp.resolveServiceProvider(spEntityId, acs, true, ActionListener.wrap(sp -> { + idp.resolveServiceProvider(spEntityId, acs, true, listener.delegateFailureAndWrap((delegate, sp) -> { try { if (null == sp) { - listener.onFailure( + delegate.onFailure( new IllegalArgumentException( "Service provider with Entity ID [" + spEntityId + "] is not registered with this Identity Provider" ) @@ -56,12 +56,12 @@ public void generateMetadata(String spEntityId, String acs, ActionListener listener) { client.execute( StartTrainedModelDeploymentAction.INSTANCE, 
startRequest, - ActionListener.wrap(r -> listener.onResponse(Boolean.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(Boolean.TRUE)) ); } @@ -181,9 +181,11 @@ public void infer(Model model, List input, Map taskSetti input, TimeValue.timeValueSeconds(10) // TODO get timeout from request ); - client.execute(InferTrainedModelDeploymentAction.INSTANCE, request, ActionListener.wrap(inferenceResult -> { - listener.onResponse(SparseEmbeddingResults.of(inferenceResult.getResults())); - }, listener::onFailure)); + client.execute( + InferTrainedModelDeploymentAction.INSTANCE, + request, + listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(SparseEmbeddingResults.of(inferenceResult.getResults()))) + ); } private static ElserMlNodeTaskSettings taskSettingsFromMap(TaskType taskType, Map config) { diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java index b4b9c4fabac54..6d9a244c13dce 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineAction.java @@ -186,17 +186,16 @@ private void handleFilteringSearchResponse( client.prepareSearchScroll(searchResponse.getScrollId()) .setScroll(TimeValue.timeValueMinutes(1L)) .execute( - ActionListener.wrap( - searchResponse1 -> handleFilteringSearchResponse( + listener.delegateFailureAndWrap( + (delegate, searchResponse1) -> handleFilteringSearchResponse( searchResponse1, pipelineSources, explicitPipelineIds, wildcardPipelinePatterns, numberOfHitsSeenSoFar, clearScroll, - listener - ), - listener::onFailure + delegate + ) ) ); } diff --git 
a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java index 701a7bed33916..7125cc12f6cfd 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/action/TransportPutPipelineAction.java @@ -37,11 +37,6 @@ protected void doExecute(Task task, PutPipelineRequest request, ActionListener

    listener.onResponse(new PutPipelineResponse(indexResponse.status())), - listener::onFailure - ) - ); + .execute(listener.delegateFailureAndWrap((l, indexResponse) -> l.onResponse(new PutPipelineResponse(indexResponse.status())))); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 8739c446c084d..d7a50b5f87f04 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -124,7 +124,7 @@ public void onResponse(DeleteResponse response) { } else { jobManager.updateProcessOnCalendarChanged( calendar.getJobIds(), - ActionListener.wrap(r -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, r) -> l.onResponse(AcknowledgedResponse.TRUE)) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java index 10679f447db15..efd65b5ac3282 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteDataFrameAnalyticsAction.java @@ -107,9 +107,8 @@ private void forceDelete( ) { logger.debug("[{}] Force deleting data frame analytics job", request.getId()); - ActionListener stopListener = ActionListener.wrap( - stopResponse -> normalDelete(parentTaskClient, clusterService.state(), request, listener), - listener::onFailure + ActionListener stopListener = listener.delegateFailureAndWrap( + (l, stopResponse) -> normalDelete(parentTaskClient, 
clusterService.state(), request, l) ); stopJob(parentTaskClient, request, stopListener); @@ -168,10 +167,10 @@ private void normalDelete( // We clean up the memory tracker on delete because there is no stop; the task stops by itself memoryTracker.removeDataFrameAnalyticsJob(id); - configProvider.get(id, ActionListener.wrap(config -> { + configProvider.get(id, listener.delegateFailureAndWrap((l, config) -> { DataFrameAnalyticsDeleter deleter = new DataFrameAnalyticsDeleter(parentTaskClient, auditor); - deleter.deleteAllDocuments(config, request.timeout(), listener); - }, listener::onFailure)); + deleter.deleteAllDocuments(config, request.timeout(), l); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index f51498815c40e..b28d37022e171 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -192,16 +192,15 @@ void deleteExpiredData( ) { if (haveAllPreviousDeletionsCompleted && mlDataRemoversIterator.hasNext()) { MlDataRemover remover = mlDataRemoversIterator.next(); - ActionListener nextListener = ActionListener.wrap( - booleanResponse -> deleteExpiredData( + ActionListener nextListener = listener.delegateFailureAndWrap( + (delegate, booleanResponse) -> deleteExpiredData( request, mlDataRemoversIterator, requestsPerSecond, - listener, + delegate, isTimedOutSupplier, booleanResponse - ), - listener::onFailure + ) ); // Removing expired ML data and artifacts requires multiple operations. 
// These are queued up and executed sequentially in the action listener, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 7842af8b12993..f3b0fcd669637 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -167,13 +167,13 @@ protected void masterOperation( } ); - ActionListener markAsDeletingListener = ActionListener.wrap(response -> { + ActionListener markAsDeletingListener = finalListener.delegateFailureAndWrap((delegate, response) -> { if (request.isForce()) { - forceDeleteJob(parentTaskClient, request, state, finalListener); + forceDeleteJob(parentTaskClient, request, state, delegate); } else { - normalDeleteJob(parentTaskClient, request, state, finalListener); + normalDeleteJob(parentTaskClient, request, state, delegate); } - }, finalListener::onFailure); + }); ActionListener datafeedDeleteListener = ActionListener.wrap(response -> { auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_DELETING, taskId)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java index 4336489ce5d24..3865858f527b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEvaluateDataFrameAction.java @@ -146,13 +146,13 @@ private TypedChainTaskExecutor.ChainTask nextTask() { SearchRequest searchRequest = new SearchRequest(request.getIndices()).source(searchSourceBuilder); useSecondaryAuthIfAvailable( securityContext, - () -> 
client.execute(TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { + () -> client.execute(TransportSearchAction.TYPE, searchRequest, listener.delegateFailureAndWrap((l, searchResponse) -> { evaluation.process(searchResponse); if (evaluation.hasAllResults() == false) { add(nextTask()); } - listener.onResponse(null); - }, listener::onFailure)) + l.onResponse(null); + })) ); }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java index ebbe06e69ba63..db6c962abbf55 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDatafeedsAction.java @@ -68,7 +68,7 @@ protected void masterOperation( datafeedManager.getDatafeeds( request, parentTaskId, - ActionListener.wrap(datafeeds -> listener.onResponse(new GetDatafeedsAction.Response(datafeeds)), listener::onFailure) + listener.delegateFailureAndWrap((l, datafeeds) -> l.onResponse(new GetDatafeedsAction.Response(datafeeds))) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java index 7fccc722d0c88..622d5ccab6940 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -76,9 +76,9 @@ public TransportUpdateFilterAction( @Override protected void doExecute(Task task, UpdateFilterAction.Request request, ActionListener listener) { - ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { - updateFilter(filterWithVersion, request, listener); - }, listener::onFailure); + 
ActionListener filterListener = listener.delegateFailureAndWrap( + (l, filterWithVersion) -> updateFilter(filterWithVersion, request, l) + ); getFilterWithVersion(request.getFilterId(), filterListener); } @@ -142,7 +142,7 @@ public void onResponse(DocWriteResponse indexResponse) { filter, request.getAddItems(), request.getRemoveItems(), - ActionListener.wrap(response -> listener.onResponse(new PutFilterAction.Response(filter)), listener::onFailure) + listener.delegateFailureAndWrap((l, response) -> l.onResponse(new PutFilterAction.Response(filter))) ); } From 79c874fbd1a7ae6124c88b3e5db6cf321357e3f9 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 5 Dec 2023 17:41:11 +0000 Subject: [PATCH 229/263] [ML] Disable concurrency for frequent items aggregation (#102999) The frequent items aggregation suffers from a significant increase in garbage collection activity when executed in parallel across slices. It does not run much faster when parallelised, so the simplest fix is to disable parallelisation. 
--- .../frequentitemsets/FrequentItemSetsAggregationBuilder.java | 5 +++++ .../FrequentItemSetsAggregationBuilderTests.java | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java index 684d61dbdedb3..b6bb013e86421 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilder.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.function.ToLongFunction; import static org.elasticsearch.common.Strings.format; @@ -264,4 +265,8 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersions.V_8_4_0; } + @Override + public boolean supportsParallelCollection(ToLongFunction fieldCardinalityResolver) { + return false; + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java index 9534ace3d3b9b..a2b7d0bfbe84c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregationBuilderTests.java @@ -248,4 +248,9 @@ private static IncludeExclude randomIncludeExclude() { return new IncludeExclude(null, null, null, new TreeSet<>(Set.of(newBytesRef("exclude")))); } } + + public void testSupportsParallelCollection() { + FrequentItemSetsAggregationBuilder 
frequentItemSetsAggregationBuilder = randomFrequentItemsSetsAggregationBuilder(); + assertFalse(frequentItemSetsAggregationBuilder.supportsParallelCollection(null)); + } } From e8907da8792ed919d4418ef83c5a1b68ee6d3891 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 5 Dec 2023 19:28:22 +0100 Subject: [PATCH 230/263] [Connectors API] Enable as technical preview (#102994) --- docs/changelog/102994.yaml | 5 +++++ .../rest-api-spec/api/connector.check_in.json | 3 +-- .../rest-api-spec/api/connector.delete.json | 3 +-- .../resources/rest-api-spec/api/connector.get.json | 3 +-- .../rest-api-spec/api/connector.last_sync.json | 3 +-- .../resources/rest-api-spec/api/connector.list.json | 3 +-- .../resources/rest-api-spec/api/connector.post.json | 3 +-- .../resources/rest-api-spec/api/connector.put.json | 3 +-- .../api/connector.update_configuration.json | 3 +-- .../rest-api-spec/api/connector.update_error.json | 3 +-- .../api/connector.update_filtering.json | 3 +-- .../rest-api-spec/api/connector.update_name.json | 3 +-- .../rest-api-spec/api/connector.update_pipeline.json | 3 +-- .../api/connector.update_scheduling.json | 3 +-- .../rest-api-spec/api/connector_sync_job.cancel.json | 3 +-- .../api/connector_sync_job.check_in.json | 3 +-- .../rest-api-spec/api/connector_sync_job.delete.json | 3 +-- .../rest-api-spec/api/connector_sync_job.error.json | 3 +-- .../rest-api-spec/api/connector_sync_job.get.json | 3 +-- .../rest-api-spec/api/connector_sync_job.list.json | 3 +-- .../rest-api-spec/api/connector_sync_job.post.json | 3 +-- .../api/connector_sync_job.update_stats.json | 3 +-- .../application/connector/ConnectorAPIFeature.java | 12 +++++++++++- 23 files changed, 37 insertions(+), 43 deletions(-) create mode 100644 docs/changelog/102994.yaml diff --git a/docs/changelog/102994.yaml b/docs/changelog/102994.yaml new file mode 100644 index 0000000000000..c35baaefcb723 --- /dev/null +++ b/docs/changelog/102994.yaml @@ -0,0 +1,5 @@ +pr: 102994 +summary: Enable Connectors 
API as technical preview +area: Application +type: feature +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json index a9db92aa450e0..e95621d30fc16 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -5,8 +5,7 @@ "description": "Updates the last_seen timestamp in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json index 6cfc0ffcaf02b..dcb3a4f83c287 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -5,8 +5,7 @@ "description": "Deletes a connector." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json index d866920324852..bcddef8cb5cb9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json @@ -5,8 +5,7 @@ "description": "Returns the details about a connector." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json index 43b7b078eef58..7bc1504253070 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -5,8 +5,7 @@ "description": "Updates the stats of last sync in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index a1e5ddcc5d686..852a5fbd85998 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -5,8 +5,7 @@ "description": "Lists all connectors." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json index 2dfaf150c455a..e76124bbecf7d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -5,8 +5,7 @@ "description": "Creates a connector." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json index 8511b870a2d12..0ab5c18671040 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json @@ -5,8 +5,7 @@ "description": "Creates or updates a connector." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json index 347418940b4c9..a82f9e0f29225 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -5,8 +5,7 @@ "description": "Updates the connector configuration." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json index 5d82a3729b501..51d5a1b25973b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -5,8 +5,7 @@ "description": "Updates the error field in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json index 6923dc88006e3..b9815fc111c06 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json @@ -5,8 +5,7 @@ "description": "Updates the filtering field in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json index e42d9b5766b0a..dabac5599932b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -5,8 +5,7 @@ "description": "Updates the name and/or description fields in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json index 2bd1acf7d28a6..25687e41a48de 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json @@ -5,8 +5,7 @@ "description": "Updates the pipeline field in the connector document." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json index 8d45e588a75ef..8d934b8025145 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json @@ -5,8 +5,7 @@ "description": "Updates the scheduling field in the connector document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json index 883dd54bcb89b..dbea6935f8a87 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json @@ -5,8 +5,7 @@ "description": "Cancels a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json index 6c406a3a3d2c1..8193d92395255 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json @@ -5,8 +5,7 @@ "description": "Checks in a connector sync job (refreshes 'last_seen')." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json index de8ffff861a98..ba9b5095a5275 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json @@ -5,8 +5,7 @@ "description": "Deletes a connector sync job." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json index 5db061eda6e48..394e6e2fcb38f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json @@ -5,8 +5,7 @@ "description": "Sets an error for a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json index 6eb461ad62128..d0f14b0001bd8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json @@ -5,8 +5,7 @@ "description": "Returns the details about a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json index d09a720f748ec..86995477f060a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json @@ -5,8 +5,7 @@ "description": "Lists all connector sync jobs." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json index 563d0022d90d3..1db58c31dfa38 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json @@ -5,8 +5,7 @@ "description": "Creates a connector sync job." }, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json index 52f5a55cc8458..825e5d8939e2d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json @@ -5,8 +5,7 @@ "description": "Updates the stats fields in the connector sync job document." 
}, "stability": "experimental", - "visibility": "feature_flag", - "feature_flag": "es.connector_api_feature_flag_enabled", + "visibility": "public", "headers": { "accept": [ "application/json" diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java index 40dcf02a2bf19..a3053e90335ad 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorAPIFeature.java @@ -16,7 +16,17 @@ public class ConnectorAPIFeature { private static final FeatureFlag CONNECTOR_API_FEATURE_FLAG = new FeatureFlag("connector_api"); + /** + * Enables the Connectors API feature by default for the tech preview phase. + * As documented, the Connectors API is currently a tech preview feature, + * and customers should be aware that no SLAs or support are guaranteed during + * its pre-General Availability (GA) stage. + * + * Instead of removing the feature flag from the code, we enable it by default. + * This approach allows for the complete deactivation of the feature during the QA phase, + * should any critical bugs be discovered, with a single, trackable code change. + */ public static boolean isEnabled() { - return CONNECTOR_API_FEATURE_FLAG.isEnabled(); + return true; } } From 931c0fce8e5f2ad66a3645c469872c8638d02e16 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 5 Dec 2023 19:31:59 +0100 Subject: [PATCH 231/263] Mute ReverseNestedAggregatorTests (#103005) similar to https://github.com/elastic/elasticsearch/pull/102998, let's mute it until it gets fixed. 
relates https://github.com/elastic/elasticsearch/issues/102974 --- .../bucket/nested/ReverseNestedAggregatorTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 9c908f13d90bc..f6be5c2171193 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; @@ -43,6 +44,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested; import static org.hamcrest.Matchers.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; From c0edd65175db46711c8a40ae1f80eec2e8d31fb8 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 5 Dec 2023 19:04:23 +0000 Subject: [PATCH 232/263] AwaitsFix for #103012 --- .../repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java index 
3b154cf4953be..af1af7dc53d19 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java @@ -63,6 +63,7 @@ protected Settings repositorySettings() { } @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103012") public void testRepositoryAnalysis() throws Exception { super.testRepositoryAnalysis(); } From 03dd28dced1ab53c3620168901d9d2b736b70835 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 11:15:37 -0800 Subject: [PATCH 233/263] Capture JVM compiler replay data in build results archive (#103007) We want to capture compiler replay data so we can better troubleshoot the root cause of https://github.com/elastic/elasticsearch/issues/103004. --- .../gradle/internal/ElasticsearchBuildCompletePlugin.java | 1 + 1 file changed, 1 insertion(+) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index 4902168d9b4ff..bad3ebb11a0dd 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -86,6 +86,7 @@ private List resolveProjectLogs(File projectDir) { projectDirFiles.include("**/build/testclusters/**"); projectDirFiles.include("**/build/testrun/*/temp/**"); projectDirFiles.include("**/build/**/hs_err_pid*.log"); + projectDirFiles.include("**/build/**/replay_pid*.log"); projectDirFiles.exclude("**/build/testclusters/**/data/**"); projectDirFiles.exclude("**/build/testclusters/**/distro/**"); 
projectDirFiles.exclude("**/build/testclusters/**/repo/**"); From 6b6fd7b95766be6520d5907b3ad2ab729122a9a8 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 15:43:48 -0500 Subject: [PATCH 234/263] Adding new DynamicMapperBenchmark to exercise dynamic mapping parsing (#103015) --- .../index/mapper/DynamicMapperBenchmark.java | 205 ++++++++++++++++++ 1 file changed, 205 insertions(+) create mode 100644 benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java new file mode 100644 index 0000000000000..eae233e276038 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/DynamicMapperBenchmark.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.benchmark.index.mapper; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.LuceneDocument; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.xcontent.XContentType; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.DoubleStream; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +@Fork(value = 3) +@Warmup(iterations = 3) +@Measurement(iterations = 5) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class DynamicMapperBenchmark { + + @Param({ "1600172297" }) + private long seed; + + private Random random; + private SourceToParse[] sources; + + @Setup + public void setUp() { + this.random = new Random(seed); + this.sources = generateRandomDocuments(500); + } + + private SourceToParse[] generateRandomDocuments(int count) { + var docs = new SourceToParse[count]; + for (int i = 0; i < count; i++) { + docs[i] = 
generateRandomDocument(); + } + return docs; + } + + private SourceToParse generateRandomDocument() { + int textFields = 50; + int intFields = 50; + int floatFields = 50; + int objFields = 10; + int objFieldDepth = 10; + int fieldValueCountMax = 25; + StringBuilder builder = new StringBuilder(); + builder.append("{"); + for (int i = 0; i < textFields; i++) { + if (random.nextBoolean()) { + StringBuilder fieldValueBuilder = generateTextField(fieldValueCountMax); + builder.append("\"text_field_").append(i).append("\":").append(fieldValueBuilder).append(","); + } + } + for (int i = 0; i < intFields; i++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"int_field_") + .append(i) + .append("\":") + .append(Arrays.toString(IntStream.generate(() -> random.nextInt()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int i = 0; i < floatFields; i++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"float_field_") + .append(i) + .append("\":") + .append(Arrays.toString(DoubleStream.generate(() -> random.nextFloat()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int i = 0; i < objFields; i++) { + final int idx = i; + if (random.nextBoolean()) { + continue; + } + String objFieldPrefix = Stream.generate(() -> "obj_field_" + idx).limit(objFieldDepth).collect(Collectors.joining(".")); + for (int j = 0; j < textFields; j++) { + if (random.nextBoolean()) { + StringBuilder fieldValueBuilder = generateTextField(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".text_field_") + .append(j) + .append("\":") + .append(fieldValueBuilder) + .append(","); + } + } + for (int j = 0; j < intFields; j++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".int_field_") + .append(j) + .append("\":") + 
.append(Arrays.toString(IntStream.generate(() -> random.nextInt()).limit(fieldValueCount).toArray())) + .append(","); + } + } + for (int j = 0; j < floatFields; j++) { + if (random.nextBoolean()) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + builder.append("\"") + .append(objFieldPrefix) + .append(".float_field_") + .append(j) + .append("\":") + .append(Arrays.toString(DoubleStream.generate(() -> random.nextFloat()).limit(fieldValueCount).toArray())) + .append(","); + } + } + } + if (builder.charAt(builder.length() - 1) == ',') { + builder.deleteCharAt(builder.length() - 1); + } + builder.append("}"); + return new SourceToParse(UUIDs.randomBase64UUID(), new BytesArray(builder.toString()), XContentType.JSON); + } + + private StringBuilder generateTextField(int fieldValueCountMax) { + int fieldValueCount = random.nextInt(fieldValueCountMax); + StringBuilder fieldValueBuilder = new StringBuilder(); + fieldValueBuilder.append("["); + for (int j = 0; j < fieldValueCount - 1; j++) { + fieldValueBuilder.append("\"").append(randomString(6)).append("\"").append(","); + } + return fieldValueBuilder.append("\"").append(randomString(6)).append("\"").append("]"); + } + + private String randomString(int maxLength) { + var length = random.nextInt(maxLength); + var builder = new StringBuilder(length); + for (int i = 0; i < length; i++) { + builder.append((byte) (32 + random.nextInt(94))); + } + return builder.toString(); + } + + @SafeVarargs + @SuppressWarnings("varargs") + private T randomFrom(T... 
items) { + return items[random.nextInt(items.length)]; + } + + @Benchmark + public List benchmarkDynamicallyCreatedFields() throws Exception { + MapperService mapperService = MapperServiceFactory.create("{}"); + for (int i = 0; i < 25; i++) { + DocumentMapper documentMapper = mapperService.documentMapper(); + Mapping mapping = null; + if (documentMapper == null) { + documentMapper = DocumentMapper.createEmpty(mapperService); + mapping = documentMapper.mapping(); + } + ParsedDocument doc = documentMapper.parse(randomFrom(sources)); + if (mapping != null) { + doc.addDynamicMappingsUpdate(mapping); + } + if (doc.dynamicMappingsUpdate() != null) { + mapperService.merge( + "_doc", + new CompressedXContent(XContentHelper.toXContent(doc.dynamicMappingsUpdate(), XContentType.JSON, false)), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + } + return mapperService.documentMapper().parse(randomFrom(sources)).docs(); + } +} From c3bc39a95ab8f84b79b64bff5e2985ff99decf4e Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 5 Dec 2023 15:49:35 -0500 Subject: [PATCH 235/263] Log more information in debug when synonyms fail updates (#102946) Related to: https://github.com/elastic/elasticsearch/issues/102261 In test failures, we are not receiving any information around the bulk indexing cause stacktrace, just the message. This adds debug logging and grabs the first stacktrace over all indices. Additionally, the logger groups by the failure message in an effort to find unique failures over all the indices. 
--- .../test/synonyms/40_synonyms_sets_get.yml | 13 +++++++++ .../SynonymsManagementAPIService.java | 29 +++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml index d13c8e9ffcc65..f2d29bf863a8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/40_synonyms_sets_get.yml @@ -23,8 +23,21 @@ setup: body: synonyms_set: - synonyms: "pc, computer" + # set logging to debug for issue: https://github.com/elastic/elasticsearch/issues/102261 + - do: + cluster.put_settings: + body: + persistent: + logger.org.elasticsearch.synonyms: DEBUG --- +teardown: + - do: + cluster.put_settings: + body: + persistent: + logger.org.elasticsearch.synonyms: null +--- "List synonyms set": - do: synonyms.get_synonyms_sets: { } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 848f103aaaf56..20aac833190a7 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -8,6 +8,8 @@ package org.elasticsearch.synonyms; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -62,6 +64,8 @@ * Manages synonyms performing operations on the system index */ public class SynonymsManagementAPIService { + + private static final Logger logger = LogManager.getLogger(SynonymsManagementAPIService.class); private static final String SYNONYMS_INDEX_NAME_PATTERN 
= ".synonyms-*"; private static final int SYNONYMS_INDEX_FORMAT = 2; private static final String SYNONYMS_INDEX_CONCRETE_NAME = ".synonyms-" + SYNONYMS_INDEX_FORMAT; @@ -234,11 +238,30 @@ private static SynonymRule sourceMapToSynonymRule(Map docSourceA return new SynonymRule((String) docSourceAsMap.get(SYNONYM_RULE_ID_FIELD), (String) docSourceAsMap.get(SYNONYMS_FIELD)); } + private static void logUniqueFailureMessagesWithIndices(List bulkFailures) { + // check if logger is at least debug + if (logger.isDebugEnabled() == false) { + return; + } + Map> uniqueFailureMessages = bulkFailures.stream() + .collect(Collectors.groupingBy(BulkItemResponse.Failure::getMessage)); + // log each unique failure with their associated indices and the first stacktrace + uniqueFailureMessages.forEach((failureMessage, failures) -> { + logger.debug( + "Error updating synonyms: [{}], indices: [{}], stacktrace: [{}]", + failureMessage, + failures.stream().map(BulkItemResponse.Failure::getIndex).collect(Collectors.joining(",")), + ExceptionsHelper.formatStackTrace(failures.get(0).getCause().getStackTrace()) + ); + }); + } + public void putSynonymsSet(String synonymSetId, SynonymRule[] synonymsSet, ActionListener listener) { deleteSynonymsSetObjects(synonymSetId, listener.delegateFailure((deleteByQueryResponseListener, bulkDeleteResponse) -> { boolean created = bulkDeleteResponse.getDeleted() == 0; final List bulkDeleteFailures = bulkDeleteResponse.getBulkFailures(); if (bulkDeleteFailures.isEmpty() == false) { + logUniqueFailureMessagesWithIndices(bulkDeleteFailures); listener.onFailure( new ElasticsearchException( "Error updating synonyms: " @@ -264,6 +287,12 @@ public void putSynonymsSet(String synonymSetId, SynonymRule[] synonymsSet, Actio bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute(deleteByQueryResponseListener.delegateFailure((bulkInsertResponseListener, bulkInsertResponse) -> { if (bulkInsertResponse.hasFailures()) { + 
logUniqueFailureMessagesWithIndices( + Arrays.stream(bulkInsertResponse.getItems()) + .filter(BulkItemResponse::isFailed) + .map(BulkItemResponse::getFailure) + .collect(Collectors.toList()) + ); bulkInsertResponseListener.onFailure( new ElasticsearchException("Error updating synonyms: " + bulkInsertResponse.buildFailureMessage()) ); From 714611e1efaba13beba25999613b957bb5d83886 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 5 Dec 2023 14:52:15 -0600 Subject: [PATCH 236/263] Clarify javadoc for AckedClusterStateUpdateTask (#103001) Javadoc only change to clarify "acknowledged" --- .../elasticsearch/cluster/AckedClusterStateUpdateTask.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java b/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java index 60ee105ade461..b81fde1156bd1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/AckedClusterStateUpdateTask.java @@ -15,8 +15,9 @@ import org.elasticsearch.core.TimeValue; /** - * An extension interface to {@link ClusterStateUpdateTask} that allows to be notified when - * all the nodes have acknowledged a cluster state update request + * An extension interface to {@link ClusterStateUpdateTask} that allows the caller to be notified after the master has + * computed, published, accepted, committed, and applied the cluster state update AND only after the rest of the nodes + * (or a specified subset) have also accepted and applied the cluster state update. 
*/ public abstract class AckedClusterStateUpdateTask extends ClusterStateUpdateTask implements ClusterStateAckListener { From 4934d083647ea8bd7a59c9fde907c5f7a4c26ba6 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 5 Dec 2023 17:01:22 -0500 Subject: [PATCH 237/263] Deprecate the unused elasticsearch_version field of enrich policy json (#103013) --- docs/changelog/103013.yaml | 5 ++ .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/core/enrich/EnrichPolicy.java | 81 ++++++++++--------- .../enrich/action/PutEnrichPolicyAction.java | 6 -- .../xpack/enrich/EnrichStore.java | 20 +---- .../xpack/enrich/EnrichPolicyRunnerTests.java | 2 +- .../rest-api-spec/test/enrich/10_basic.yml | 32 +++++++- 7 files changed, 81 insertions(+), 66 deletions(-) create mode 100644 docs/changelog/103013.yaml diff --git a/docs/changelog/103013.yaml b/docs/changelog/103013.yaml new file mode 100644 index 0000000000000..bb8eb99088856 --- /dev/null +++ b/docs/changelog/103013.yaml @@ -0,0 +1,5 @@ +pr: 103013 +summary: Deprecate the unused `elasticsearch_version` field of enrich policy json +area: Ingest Node +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0b899a863e197..5c19edc14075b 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -190,6 +190,7 @@ static TransportVersion def(int id) { public static final TransportVersion SOURCE_IN_SINGLE_VALUE_QUERY_ADDED = def(8_557_00_0); public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); + public static final TransportVersion ENRICH_ELASTICSEARCH_VERSION_REMOVED = def(8_560_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java index 600e065900d30..74b274f2fd387 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.enrich; +import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -13,9 +14,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -33,6 +35,11 @@ */ public final class EnrichPolicy implements Writeable, ToXContentFragment { + private static final String ELASTICEARCH_VERSION_DEPRECATION_MESSAGE = + "the [elasticsearch_version] field of an enrich policy has no effect and will be removed in Elasticsearch 9.0"; + + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(EnrichPolicy.class); + public static final String ENRICH_INDEX_NAME_BASE = ".enrich-"; public static final String ENRICH_INDEX_PATTERN = ENRICH_INDEX_NAME_BASE + "*"; @@ -57,7 +64,7 @@ public final class EnrichPolicy implements Writeable, ToXContentFragment { (List) args[1], (String) args[2], (List) args[3], - (Version) args[4] 
+ (String) args[4] ) ); @@ -74,12 +81,7 @@ private static void declareCommonConstructorParsingOptions(ConstructingObjec parser.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES); parser.declareString(ConstructingObjectParser.constructorArg(), MATCH_FIELD); parser.declareStringArray(ConstructingObjectParser.constructorArg(), ENRICH_FIELDS); - parser.declareField( - ConstructingObjectParser.optionalConstructorArg(), - ((p, c) -> Version.fromString(p.text())), - ELASTICSEARCH_VERSION, - ValueType.STRING - ); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), ELASTICSEARCH_VERSION); } public static EnrichPolicy fromXContent(XContentParser parser) throws IOException { @@ -108,37 +110,45 @@ public static EnrichPolicy fromXContent(XContentParser parser) throws IOExceptio private final List indices; private final String matchField; private final List enrichFields; - private final Version elasticsearchVersion; public EnrichPolicy(StreamInput in) throws IOException { - this( - in.readString(), - in.readOptionalWriteable(QuerySource::new), - in.readStringCollectionAsList(), - in.readString(), - in.readStringCollectionAsList(), - Version.readVersion(in) - ); + this.type = in.readString(); + this.query = in.readOptionalWriteable(QuerySource::new); + this.indices = in.readStringCollectionAsList(); + this.matchField = in.readString(); + this.enrichFields = in.readStringCollectionAsList(); + if (in.getTransportVersion().before(TransportVersions.ENRICH_ELASTICSEARCH_VERSION_REMOVED)) { + // consume the passed-in meaningless version that old elasticsearch clusters will send + Version.readVersion(in); + } } public EnrichPolicy(String type, QuerySource query, List indices, String matchField, List enrichFields) { - this(type, query, indices, matchField, enrichFields, Version.CURRENT); + this.type = type; + this.query = query; + this.indices = indices; + this.matchField = matchField; + this.enrichFields = enrichFields; } - public EnrichPolicy( 
+ private EnrichPolicy( String type, QuerySource query, List indices, String matchField, List enrichFields, - Version elasticsearchVersion + String elasticsearchVersion ) { - this.type = type; - this.query = query; - this.indices = indices; - this.matchField = matchField; - this.enrichFields = enrichFields; - this.elasticsearchVersion = elasticsearchVersion != null ? elasticsearchVersion : Version.CURRENT; + this(type, query, indices, matchField, enrichFields); + // for backwards compatibility reasons, it is possible to pass in an elasticsearchVersion -- that version is + // completely ignored and does nothing. we'll fix that in a future version, so send a deprecation warning. + if (elasticsearchVersion != null) { + deprecationLogger.warn( + DeprecationCategory.OTHER, + "enrich_policy_with_elasticsearch_version", + ELASTICEARCH_VERSION_DEPRECATION_MESSAGE + ); + } } public String getType() { @@ -161,10 +171,6 @@ public List getEnrichFields() { return enrichFields; } - public Version getElasticsearchVersion() { - return elasticsearchVersion; - } - public static String getBaseName(String policyName) { return ENRICH_INDEX_NAME_BASE + policyName; } @@ -202,7 +208,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(indices); out.writeString(matchField); out.writeStringCollection(enrichFields); - Version.writeVersion(elasticsearchVersion, out); + if (out.getTransportVersion().before(TransportVersions.ENRICH_ELASTICSEARCH_VERSION_REMOVED)) { + // emit the current version of elasticsearch for bwc serialization reasons + Version.writeVersion(Version.CURRENT, out); + } } @Override @@ -222,9 +231,6 @@ private void toInnerXContent(XContentBuilder builder, Params params) throws IOEx builder.array(INDICES.getPreferredName(), indices.toArray(new String[0])); builder.field(MATCH_FIELD.getPreferredName(), matchField); builder.array(ENRICH_FIELDS.getPreferredName(), enrichFields.toArray(new String[0])); - if 
(params.paramAsBoolean("include_version", false) && elasticsearchVersion != null) { - builder.field(ELASTICSEARCH_VERSION.getPreferredName(), elasticsearchVersion.toString()); - } } @Override @@ -236,13 +242,12 @@ public boolean equals(Object o) { && Objects.equals(query, policy.query) && indices.equals(policy.indices) && matchField.equals(policy.matchField) - && enrichFields.equals(policy.enrichFields) - && elasticsearchVersion.equals(policy.elasticsearchVersion); + && enrichFields.equals(policy.enrichFields); } @Override public int hashCode() { - return Objects.hash(type, query, indices, matchField, enrichFields, elasticsearchVersion); + return Objects.hash(type, query, indices, matchField, enrichFields); } public String toString() { @@ -310,7 +315,7 @@ public static class NamedPolicy implements Writeable, ToXContentFragment { (List) args[2], (String) args[3], (List) args[4], - (Version) args[5] + (String) args[5] ) ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index e42a5a05022d2..ec1b04e453bb5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.enrich.action; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -39,11 +38,6 @@ public static class Request extends MasterNodeRequest { - for (String indexExpression : finalPolicy.getIndices()) { + for (String indexExpression : policy.getIndices()) { // indices field in policy can contain wildcards, aliases etc. 
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames( current, @@ -110,12 +96,12 @@ public static void putPolicy( throw new IllegalArgumentException("source index [" + concreteIndex + "] has no mapping"); } Map mappingSource = mapping.getSourceAsMap(); - EnrichPolicyRunner.validateMappings(name, finalPolicy, concreteIndex, mappingSource); + EnrichPolicyRunner.validateMappings(name, policy, concreteIndex, mappingSource); } } final Map policies = getPolicies(current); - EnrichPolicy existing = policies.putIfAbsent(name, finalPolicy); + EnrichPolicy existing = policies.putIfAbsent(name, policy); if (existing != null) { throw new ResourceAlreadyExistsException("policy [{}] already exists", name); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index f95c4959be771..aac9f5e74cf0e 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -313,7 +313,7 @@ private void testNumberRangeMatchType(String rangeType) throws Exception { } ); List enrichFields = List.of("zipcode"); - EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.RANGE_TYPE, null, List.of(sourceIndex), "range", enrichFields, null); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.RANGE_TYPE, null, List.of(sourceIndex), "range", enrichFields); String policyName = "test1"; final long createTime = randomNonNegativeLong(); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml index e580b188c9ba4..afe4bcabee3d9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/enrich/10_basic.yml @@ -1,6 +1,4 @@ ---- -"Test enrich crud apis": - +setup: - do: indices.create: index: bar @@ -13,8 +11,9 @@ type: keyword b: type: keyword - - is_true: acknowledged +--- +"Test enrich crud apis": - do: enrich.put_policy: name: policy-crud @@ -60,3 +59,28 @@ enrich.delete_policy: name: policy-crud - is_true: acknowledged + +--- +"Test using the deprecated elasticsearch_version field results in a warning": + - skip: + version: " - 8.11.99" + reason: "elasticsearch_version field deprecated in 8.12.0, to be removed in 9.0" + features: warnings + + - do: + warnings: + - "the [elasticsearch_version] field of an enrich policy has no effect and will be removed in Elasticsearch 9.0" + enrich.put_policy: + name: policy-crud-warning + body: + match: + indices: ["bar*"] + match_field: baz + enrich_fields: ["a", "b"] + elasticsearch_version: "any string here is acceptable" + - is_true: acknowledged + + - do: + enrich.delete_policy: + name: policy-crud-warning + - is_true: acknowledged From 5134fab2b550ba9f00dd5b3245288a0227b23a49 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 14:36:51 -0800 Subject: [PATCH 238/263] Mute ESQL test --- .../plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 02e9db6ededf1..e0167ce451e80 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -257,7 +257,8 @@ eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] +// AwaitsFix: https://github.com/elastic/elasticsearch/issues/103028 +pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue 
warning introduced in 8.12 only]-Ignore from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true warning:Line 1:20: evaluation of [ip1 > to_ip(\"127.0.0.1\")] failed, treating result as null. Only first 20 failures recorded. From d795d82253c274fd8989a22146ae4601c6a72104 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 5 Dec 2023 15:20:28 -0800 Subject: [PATCH 239/263] Use extra properties for configuring test output dump on error (#103023) --- .../gradle/internal/ElasticsearchTestBasePlugin.java | 2 +- .../gradle/internal/test/ErrorReportingTestListener.java | 3 ++- .../gradle/internal/test/rest/RestTestBasePlugin.java | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index 50f4000612981..31b62c4ac700f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -57,7 +57,7 @@ public void apply(Project project) { File testOutputDir = new File(test.getReports().getJunitXml().getOutputLocation().getAsFile().get(), "output"); ErrorReportingTestListener listener = new ErrorReportingTestListener(test, testOutputDir); - test.getInputs().property(DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); + test.getExtensions().getExtraProperties().set(DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); test.getExtensions().add("errorReportingTestListener", listener); test.addTestOutputListener(listener); test.addTestListener(listener); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java index 
15cae4868034e..e3149d63e5c5b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/ErrorReportingTestListener.java @@ -258,7 +258,8 @@ public void close() throws IOException { } private boolean isDumpOutputEnabled() { - return (Boolean) testTask.getInputs() + return (Boolean) testTask.getExtensions() + .getExtraProperties() .getProperties() .getOrDefault(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, true); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index a7e72b55f9117..b51842bbdcbf7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -168,7 +168,7 @@ public void apply(Project project) { nonInputSystemProperties.systemProperty(TESTS_MAX_PARALLEL_FORKS_SYSPROP, () -> String.valueOf(task.getMaxParallelForks())); // Disable test failure reporting since this stuff is now captured in build scans - task.getInputs().property(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); + task.getExtensions().getExtraProperties().set(ElasticsearchTestBasePlugin.DUMP_OUTPUT_ON_FAILURE_PROP_NAME, false); // Disable the security manager and syscall filter since the test framework needs to fork processes task.systemProperty("tests.security.manager", "false"); From 6bba0efa7807c57b6884143012b7c518367615ca Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 6 Dec 2023 02:24:00 +0200 Subject: [PATCH 240/263] ESQL: New telemetry for commands (#102937) * Add enrich,mv_expand,show,row,from,drop,keep,rename commands to telemetry --- docs/changelog/102937.yaml | 5 + 
docs/reference/rest-api/usage.asciidoc | 10 +- .../xpack/esql/analysis/Analyzer.java | 19 +- .../xpack/esql/analysis/Verifier.java | 39 +-- .../xpack/esql/stats/FeatureMetric.java | 64 ++++- .../esql/stats/VerifierMetricsTests.java | 271 +++++++++++++++++- .../rest-api-spec/test/esql/60_usage.yml | 14 +- 7 files changed, 360 insertions(+), 62 deletions(-) create mode 100644 docs/changelog/102937.yaml diff --git a/docs/changelog/102937.yaml b/docs/changelog/102937.yaml new file mode 100644 index 0000000000000..116fbadebe09d --- /dev/null +++ b/docs/changelog/102937.yaml @@ -0,0 +1,5 @@ +pr: 102937 +summary: "ESQL: New telemetry commands" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index e2529de75f0e7..6bf7f2139680b 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -220,7 +220,15 @@ GET /_xpack/usage "grok" : 0, "limit" : 0, "where" : 0, - "sort" : 0 + "sort" : 0, + "drop" : 0, + "show" : 0, + "rename" : 0, + "mv_expand" : 0, + "keep" : 0, + "enrich" : 0, + "from" : 0, + "row" : 0 }, "queries" : { "rest" : { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index a8462703a2b37..bf97faea7ae74 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.analyzer.AnalyzerRules; import 
org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.BaseAnalyzerRule; @@ -62,6 +63,7 @@ import org.elasticsearch.xpack.ql.util.StringUtils; import java.util.ArrayList; +import java.util.BitSet; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; @@ -75,6 +77,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; import static org.elasticsearch.xpack.ql.analyzer.AnalyzerRules.resolveFunction; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; @@ -107,11 +110,12 @@ public Analyzer(AnalyzerContext context, Verifier verifier) { } public LogicalPlan analyze(LogicalPlan plan) { - return verify(execute(plan)); + BitSet partialMetrics = new BitSet(FeatureMetric.values().length); + return verify(execute(plan), gatherPreAnalysisMetrics(plan, partialMetrics)); } - public LogicalPlan verify(LogicalPlan plan) { - Collection failures = verifier.verify(plan); + public LogicalPlan verify(LogicalPlan plan, BitSet partialMetrics) { + Collection failures = verifier.verify(plan, partialMetrics); if (failures.isEmpty() == false) { throw new VerificationException(failures); } @@ -702,4 +706,13 @@ private static Expression stringToDate(Expression stringExpression) { return result; } } + + private BitSet gatherPreAnalysisMetrics(LogicalPlan plan, BitSet b) { + // count only the explicit "limit" the user added, otherwise all queries will have a "limit" and telemetry won't reflect reality + if (plan.collectFirstChildren(Limit.class::isInstance).isEmpty() == false) { + b.set(LIMIT.ordinal()); + } + plan.forEachDown(p -> FeatureMetric.set(p, b)); + return b; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 479690912c904..d38dd57ff6aa8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -11,9 +11,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; -import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Eval; -import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.stats.FeatureMetric; @@ -34,10 +32,7 @@ import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; -import org.elasticsearch.xpack.ql.plan.logical.Filter; -import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.ql.plan.logical.OrderBy; import org.elasticsearch.xpack.ql.plan.logical.Project; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -50,13 +45,6 @@ import java.util.Set; import java.util.stream.Stream; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; -import static 
org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; import static org.elasticsearch.xpack.ql.analyzer.VerifierChecks.checkFilterConditionType; import static org.elasticsearch.xpack.ql.common.Failure.fail; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -73,9 +61,11 @@ public Verifier(Metrics metrics) { * Verify that a {@link LogicalPlan} can be executed. * * @param plan The logical plan to be verified + * @param partialMetrics a bitset indicating a certain command (or "telemetry feature") is present in the query * @return a collection of verification failures; empty if and only if the plan is valid */ - Collection verify(LogicalPlan plan) { + Collection verify(LogicalPlan plan, BitSet partialMetrics) { + assert partialMetrics != null; Set failures = new LinkedHashSet<>(); // quick verification for unresolved attributes @@ -152,7 +142,7 @@ else if (p.resolved()) { // gather metrics if (failures.isEmpty()) { - gatherMetrics(plan); + gatherMetrics(plan, partialMetrics); } return failures; @@ -259,25 +249,8 @@ private static void checkBinaryComparison(LogicalPlan p, Set failures) }); } - private void gatherMetrics(LogicalPlan plan) { - BitSet b = new BitSet(FeatureMetric.values().length); - plan.forEachDown(p -> { - if (p instanceof Dissect) { - b.set(DISSECT.ordinal()); - } else if (p instanceof Eval) { - b.set(EVAL.ordinal()); - } else if (p instanceof Grok) { - b.set(GROK.ordinal()); - } else if (p instanceof Limit) { - b.set(LIMIT.ordinal()); - } else if (p instanceof OrderBy) { - b.set(SORT.ordinal()); - } else if (p instanceof Aggregate) { - b.set(STATS.ordinal()); - } else if (p instanceof Filter) { - b.set(WHERE.ordinal()); - } - }); + private void gatherMetrics(LogicalPlan plan, BitSet b) { + plan.forEachDown(p -> FeatureMetric.set(p, b)); for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { metrics.inc(FeatureMetric.values()[i]); } 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index d2680f30ea03f..d30aff3139495 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -7,25 +7,73 @@ package org.elasticsearch.xpack.esql.stats; +import org.elasticsearch.xpack.esql.plan.logical.Dissect; +import org.elasticsearch.xpack.esql.plan.logical.Drop; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.Rename; +import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.show.ShowFunctions; +import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; +import org.elasticsearch.xpack.ql.plan.logical.Aggregate; +import org.elasticsearch.xpack.ql.plan.logical.EsRelation; +import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.plan.logical.OrderBy; + +import java.util.BitSet; import java.util.Locale; +import java.util.function.Predicate; public enum FeatureMetric { - /** * The order of these enum values is important, do not change it. * For any new values added to it, they should go at the end of the list. 
* see {@link org.elasticsearch.xpack.esql.analysis.Verifier#gatherMetrics} */ - DISSECT, - EVAL, - GROK, - LIMIT, - SORT, - STATS, - WHERE; + DISSECT(Dissect.class::isInstance), + EVAL(Eval.class::isInstance), + GROK(Grok.class::isInstance), + LIMIT(plan -> false), // the limit is checked in Analyzer.gatherPreAnalysisMetrics, because it has a more complex and general check + SORT(OrderBy.class::isInstance), + STATS(Aggregate.class::isInstance), + WHERE(Filter.class::isInstance), + ENRICH(Enrich.class::isInstance), + MV_EXPAND(MvExpand.class::isInstance), + SHOW(plan -> plan instanceof ShowInfo || plan instanceof ShowFunctions), + ROW(Row.class::isInstance), + FROM(EsRelation.class::isInstance), + DROP(Drop.class::isInstance), + KEEP(Keep.class::isInstance), + RENAME(Rename.class::isInstance); + + private Predicate planCheck; + + FeatureMetric(Predicate planCheck) { + this.planCheck = planCheck; + } @Override public String toString() { return this.name().toLowerCase(Locale.ROOT); } + + public static void set(LogicalPlan plan, BitSet bitset) { + for (FeatureMetric metric : FeatureMetric.values()) { + if (set(plan, bitset, metric)) { + return; + } + } + } + + public static boolean set(LogicalPlan plan, BitSet bitset, FeatureMetric metric) { + var isMatch = metric.planCheck.test(plan); + if (isMatch) { + bitset.set(metric.ordinal()); + } + return isMatch; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index eb91a540e6e82..6dc15d67e0560 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -9,19 +9,25 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; -import 
org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.parser.EsqlParser; -import org.elasticsearch.xpack.ql.index.IndexResolution; import java.util.List; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzer; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DISSECT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.DROP; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ENRICH; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.EVAL; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; +import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SHOW; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.SORT; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.STATS; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.WHERE; @@ -36,10 +42,18 @@ public void testDissectQuery() { assertEquals(1L, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(0, sort(c)); assertEquals(0, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void 
testEvalQuery() { @@ -47,10 +61,18 @@ public void testEvalQuery() { assertEquals(0, dissect(c)); assertEquals(1L, eval(c)); assertEquals(0, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(0, sort(c)); assertEquals(0, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testGrokQuery() { @@ -58,10 +80,18 @@ public void testGrokQuery() { assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(1L, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(0, sort(c)); assertEquals(0, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testLimitQuery() { @@ -73,6 +103,14 @@ public void testLimitQuery() { assertEquals(0, sort(c)); assertEquals(0, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testSortQuery() { @@ -80,10 +118,18 @@ public void testSortQuery() { assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(1L, sort(c)); assertEquals(0, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public 
void testStatsQuery() { @@ -91,10 +137,18 @@ public void testStatsQuery() { assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(0, sort(c)); assertEquals(1L, stats(c)); assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testWhereQuery() { @@ -102,10 +156,18 @@ public void testWhereQuery() { assertEquals(0, dissect(c)); assertEquals(0, eval(c)); assertEquals(0, grok(c)); - assertEquals(1L, limit(c)); + assertEquals(0, limit(c)); assertEquals(0, sort(c)); assertEquals(0, stats(c)); assertEquals(1L, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testTwoWhereQuery() { @@ -117,6 +179,14 @@ public void testTwoWhereQuery() { assertEquals(1L, sort(c)); assertEquals(0, stats(c)); assertEquals(1L, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); } public void testTwoQueriesExecuted() { @@ -144,10 +214,153 @@ public void testTwoQueriesExecuted() { assertEquals(1L, dissect(c)); assertEquals(1L, eval(c)); assertEquals(1L, grok(c)); - assertEquals(2L, limit(c)); + assertEquals(1L, limit(c)); assertEquals(2L, sort(c)); assertEquals(1L, stats(c)); assertEquals(2L, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(2L, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, 
keep(c)); + assertEquals(0, rename(c)); + } + + public void testEnrich() { + Counters c = esql(""" + from employees + | sort emp_no + | limit 1 + | eval x = to_string(languages) + | enrich languages on x + | keep emp_no, language_name"""); + assertEquals(0, dissect(c)); + assertEquals(1L, eval(c)); + assertEquals(0, grok(c)); + assertEquals(1L, limit(c)); + assertEquals(1L, sort(c)); + assertEquals(0, stats(c)); + assertEquals(0, where(c)); + assertEquals(1L, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(1L, keep(c)); + assertEquals(0, rename(c)); + } + + public void testMvExpand() { + Counters c = esql(""" + from employees + | where emp_no == 10004 + | limit 1 + | keep emp_no, job + | mv_expand job + | where job LIKE \"*a*\" + | limit 2 + | where job LIKE \"*a*\" + | limit 3"""); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(1L, limit(c)); + assertEquals(0, sort(c)); + assertEquals(0, stats(c)); + assertEquals(1L, where(c)); + assertEquals(0, enrich(c)); + assertEquals(1L, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(1L, keep(c)); + assertEquals(0, rename(c)); + } + + public void testShowFunctionsOrInfo() { + String showCommand = randomFrom("show functions", "show info"); + Counters c = esql(showCommand + " | stats a = count(*), b = count(*), c = count(*) | mv_expand c"); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(0, sort(c)); + assertEquals(1L, stats(c)); + assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(1L, mvExpand(c)); + assertEquals(1L, show(c)); + assertEquals(0, row(c)); + assertEquals(0, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, 
rename(c)); + } + + public void testRow() { + Counters c = esql("row a = [\"1\", \"2\"] | enrich languages on a with a_lang = language_name"); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(0, sort(c)); + assertEquals(0, stats(c)); + assertEquals(0, where(c)); + assertEquals(1L, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(1L, row(c)); + assertEquals(0, from(c)); + assertEquals(0, drop(c)); + assertEquals(0, keep(c)); + assertEquals(0, rename(c)); + } + + public void testDropAndRename() { + Counters c = esql("from employees | rename gender AS foo | stats bar = count(*) by foo | drop foo | sort bar | drop bar"); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(1L, sort(c)); + assertEquals(1L, stats(c)); + assertEquals(0, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(1L, drop(c)); + assertEquals(0, keep(c)); + assertEquals(1L, rename(c)); + } + + public void testKeep() { + Counters c = esql(""" + from employees + | keep emp_no, languages + | where languages is null or emp_no <= 10030 + | where languages in (2, 3, emp_no) + | keep languages"""); + assertEquals(0, dissect(c)); + assertEquals(0, eval(c)); + assertEquals(0, grok(c)); + assertEquals(0, limit(c)); + assertEquals(0, sort(c)); + assertEquals(0, stats(c)); + assertEquals(1L, where(c)); + assertEquals(0, enrich(c)); + assertEquals(0, mvExpand(c)); + assertEquals(0, show(c)); + assertEquals(0, row(c)); + assertEquals(1L, from(c)); + assertEquals(0, drop(c)); + assertEquals(1L, keep(c)); + assertEquals(0, rename(c)); } private long dissect(Counters c) { @@ -178,8 +391,40 @@ private long where(Counters c) { return c.get(FPREFIX + WHERE); } - private Counters esql(String sql) { - return 
esql(sql, null); + private long enrich(Counters c) { + return c.get(FPREFIX + ENRICH); + } + + private long mvExpand(Counters c) { + return c.get(FPREFIX + MV_EXPAND); + } + + private long show(Counters c) { + return c.get(FPREFIX + SHOW); + } + + private long row(Counters c) { + return c.get(FPREFIX + ROW); + } + + private long from(Counters c) { + return c.get(FPREFIX + FROM); + } + + private long drop(Counters c) { + return c.get(FPREFIX + DROP); + } + + private long keep(Counters c) { + return c.get(FPREFIX + KEEP); + } + + private long rename(Counters c) { + return c.get(FPREFIX + RENAME); + } + + private Counters esql(String esql) { + return esql(esql, null); } private void esqlWithVerifier(String esql, Verifier verifier) { @@ -187,15 +432,13 @@ private void esqlWithVerifier(String esql, Verifier verifier) { } private Counters esql(String esql, Verifier v) { - IndexResolution mapping = AnalyzerTestUtils.analyzerDefaultMapping(); - Verifier verifier = v; Metrics metrics = null; if (v == null) { metrics = new Metrics(); verifier = new Verifier(metrics); } - analyzer(mapping, verifier).analyze(parser.createStatement(esql)); + analyzer(verifier).analyze(parser.createStatement(esql)); return metrics == null ? 
null : metrics.stats(); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index ad46a3c2d9c3e..0e2838c976799 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 8.10.99" - reason: "ESQL is available in 8.11+" + version: " - 8.11.99" + reason: "Latest, more complete, telemetry available in 8.12+" - do: indices.create: @@ -23,11 +23,19 @@ setup: - do: {xpack.usage: {}} - match: { esql.available: true } - match: { esql.enabled: true } - - length: { esql.features: 7 } + - length: { esql.features: 15 } - set: {esql.features.dissect: dissect_counter} + - set: {esql.features.drop: drop_counter} - set: {esql.features.eval: eval_counter} + - set: {esql.features.enrich: enrich_counter} + - set: {esql.features.from: from_counter} - set: {esql.features.grok: grok_counter} + - set: {esql.features.keep: keep_counter} - set: {esql.features.limit: limit_counter} + - set: {esql.features.mv_expand: mv_expand_counter} + - set: {esql.features.rename: rename_counter} + - set: {esql.features.row: row_counter} + - set: {esql.features.show: show_counter} - set: {esql.features.sort: sort_counter} - set: {esql.features.stats: stats_counter} - set: {esql.features.where: where_counter} From ab0bb4889aaffe27129221b68197dccb0733ae6e Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 6 Dec 2023 08:33:27 +0100 Subject: [PATCH 241/263] Fix :plugins:repository-hdfs:forbiddenApisJavaRestTest (#102983) Reworking forbiddenApis check to use gradle worker api exposed a bug in how we resolve krb5kdc keytab information. This fixes the depenendency to krb5kdc keytab configuration and its builtBy task. 
This also changes the usage of krb5kdc keytab files to be passed directly to task classpath as they are only required at runtime and directly having them as part of javaRestTestRuntimeOnly would mean precommit requires krb5kdc compose up which we definitely not want --- plugins/repository-hdfs/build.gradle | 12 +++++------- test/fixtures/krb5kdc-fixture/build.gradle | 2 +- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 5db01ed636995..57f4fc9a04ecd 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -59,12 +59,6 @@ dependencies { runtimeOnly "org.slf4j:slf4j-nop:${versions.slf4j}" // runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}") https://github.com/elastic/elasticsearch/issues/93714 - // Set the keytab files in the classpath so that we can access them from test code without the security manager - // freaking out. - if (isEclipse == false) { - javaRestTestRuntimeOnly project(path: ':test:fixtures:krb5kdc-fixture', configuration:'krb5KeytabsHdfsDir') - } - krb5Keytabs project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5KeytabsHdfsDir') krb5Config project(path: ':test:fixtures:krb5kdc-fixture', configuration: 'krb5ConfHdfsFile') } @@ -187,7 +181,11 @@ for (int hadoopVersion = minTestedHadoopVersion; hadoopVersion <= maxTestedHadoo } } testClassesDirs = sourceSets.javaRestTest.output.classesDirs - classpath = sourceSets.javaRestTest.runtimeClasspath + files(portsFileDir) + // Set the keytab files in the classpath so that we can access them from test code without the security manager + // freaking out. 
+ classpath = sourceSets.javaRestTest.runtimeClasspath + + configurations.krb5Keytabs + + files(portsFileDir) } } diff --git a/test/fixtures/krb5kdc-fixture/build.gradle b/test/fixtures/krb5kdc-fixture/build.gradle index 33228661b9a4a..2ad51718f272b 100644 --- a/test/fixtures/krb5kdc-fixture/build.gradle +++ b/test/fixtures/krb5kdc-fixture/build.gradle @@ -52,6 +52,6 @@ artifacts { builtBy("postProcessFixture") } krb5KeytabsHdfsDir(file("$testFixturesDir/shared/hdfs/keytabs/")) { - builtBy("preProcessFixture") + builtBy("postProcessFixture") } } From 64ac6a1f62af157cbdaa06bc4c82168940ab461a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20R=C3=BChsen?= Date: Wed, 6 Dec 2023 10:35:59 +0100 Subject: [PATCH 242/263] [Profiling] Fix flamegraph aggregation (#103010) * [Profiling] Fix flamegraph aggregation * Use forEach() to traverse frame metadata --- .../profiling/GetFlameGraphActionIT.java | 12 ++-- .../elasticsearch/xpack/profiling/Frame.java | 2 +- .../profiling/GetFlamegraphResponse.java | 16 +++++ .../xpack/profiling/StackFrame.java | 62 +++++-------------- .../TransportGetFlamegraphAction.java | 9 ++- 5 files changed, 47 insertions(+), 54 deletions(-) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java index 586071ad7c1f3..b55ea03557cf3 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java @@ -9,13 +9,15 @@ public class GetFlameGraphActionIT extends ProfilingTestCase { public void testGetStackTracesUnfiltered() throws Exception { - GetStackTracesRequest request = new GetStackTracesRequest(10, 1.0d, 1.0d, null, null, null, null, null, null, null, null); + GetStackTracesRequest request = 
new GetStackTracesRequest(1000, 600.0d, 1.0d, null, null, null, null, null, null, null, null); GetFlamegraphResponse response = client().execute(GetFlamegraphAction.INSTANCE, request).get(); // only spot-check top level properties - detailed tests are done in unit tests - assertEquals(297, response.getSize()); + assertEquals(994, response.getSize()); assertEquals(1.0d, response.getSamplingRate(), 0.001d); - assertEquals(60, response.getSelfCPU()); - assertEquals(1956, response.getTotalCPU()); - assertEquals(40, response.getTotalSamples()); + assertEquals(44, response.getSelfCPU()); + assertEquals(1865, response.getTotalCPU()); + assertEquals(1.3651d, response.getSelfAnnualCostsUSD(), 0.0001d); + assertEquals(0.000144890d, response.getSelfAnnualCO2Tons(), 0.000000001d); + assertEquals(44, response.getTotalSamples()); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java index 42d830ed00477..b2a37b7cfa903 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/Frame.java @@ -7,4 +7,4 @@ package org.elasticsearch.xpack.profiling; -public record Frame(String fileName, String functionName, int functionOffset, int lineNumber, boolean inline) {} +public record Frame(String fileName, String functionName, int functionOffset, int lineNumber, boolean inline, boolean last) {} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java index 6666c326d710d..0e24d4754e2ce 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java 
@@ -222,6 +222,22 @@ public long getTotalCPU() { return totalCPU; } + public double getSelfAnnualCostsUSD() { + return selfAnnualCostsUSD; + } + + public double getTotalAnnualCostsUSD() { + return totalAnnualCostsUSD; + } + + public double getSelfAnnualCO2Tons() { + return selfAnnualCO2Tons; + } + + public double getTotalAnnualCO2Tons() { + return totalAnnualCO2Tons; + } + public long getTotalSamples() { return totalSamples; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java index eb5134be70adb..455b150b6ee76 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackFrame.java @@ -12,11 +12,10 @@ import java.io.IOException; import java.util.Collections; -import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.NoSuchElementException; import java.util.Objects; +import java.util.function.Consumer; final class StackFrame implements ToXContentObject { List fileName; @@ -31,6 +30,22 @@ final class StackFrame implements ToXContentObject { this.lineNumber = listOf(lineNumber); } + public void forEach(Consumer action) { + int size = this.functionName.size(); // functionName is the only array that is always set + for (int i = 0; i < size; i++) { + action.accept( + new Frame( + fileName.size() > i ? fileName.get(i) : "", + functionName.get(i), + functionOffset.size() > i ? functionOffset.get(i) : 0, + lineNumber.size() > i ? 
lineNumber.get(i) : 0, + i > 0, + i == size - 1 + ) + ); + } + } + @SuppressWarnings("unchecked") private static List listOf(Object o) { if (o instanceof List) { @@ -55,11 +70,6 @@ public boolean isEmpty() { return fileName.isEmpty() && functionName.isEmpty() && functionOffset.isEmpty() && lineNumber.isEmpty(); } - public Iterable frames() { - return new Frames(); - - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -90,42 +100,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(fileName, functionName, functionOffset, lineNumber); } - - private class Frames implements Iterable { - @Override - public Iterator iterator() { - return new Iterator<>() { - private int currentElement = 0; - - @Override - public boolean hasNext() { - // array lengths might not be consistent - allow to move until all underlying lists have been exhausted - return currentElement < fileName.size() - || currentElement < functionName.size() - || currentElement < functionOffset.size() - || currentElement < lineNumber.size(); - } - - @Override - public Frame next() { - if (hasNext() == false) { - throw new NoSuchElementException(); - } - Frame f = new Frame( - get(fileName, currentElement, ""), - get(functionName, currentElement, ""), - get(functionOffset, currentElement, 0), - get(lineNumber, currentElement, 0), - currentElement > 0 - ); - currentElement++; - return f; - } - }; - } - - private static T get(List l, int index, T defaultValue) { - return index < l.size() ? 
l.get(index) : defaultValue; - } - } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index a8e8ffcd09769..5f8457e6c3b24 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -100,8 +100,9 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { Integer addressOrLine = stackTrace.addressOrLines.get(i); StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, EMPTY_STACKFRAME); String executable = response.getExecutables().getOrDefault(fileId, ""); + final boolean isLeafFrame = i == frameCount - 1; - for (Frame frame : stackFrame.frames()) { + stackFrame.forEach(frame -> { String frameGroupId = FrameGroupID.create(fileId, addressOrLine, executable, frame.fileName(), frame.functionName()); int nodeId; @@ -127,14 +128,14 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { frameGroupId ); } - if (i == frameCount - 1) { + if (isLeafFrame && frame.last()) { // Leaf frame: sum up counts for exclusive CPU. 
builder.addSamplesExclusive(nodeId, samples); builder.addAnnualCO2TonsExclusive(nodeId, annualCO2Tons); builder.addAnnualCostsUSDExclusive(nodeId, annualCostsUSD); } builder.setCurrentNode(nodeId); - } + }); } } return builder.build(); @@ -142,6 +143,7 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { private static class FlamegraphBuilder { private int currentNode = 0; + // size is the number of nodes in the flamegraph private int size = 0; private long selfCPU; private long totalCPU; @@ -149,6 +151,7 @@ private static class FlamegraphBuilder { private double totalAnnualCO2Tons; private double selfAnnualCostsUSD; private double totalAnnualCostsUSD; + // totalSamples is the total number of samples in the stacktraces private final long totalSamples; // Map: FrameGroupId -> NodeId private final List> edges; From b88df64f03c11e525ebae24e96fe094b99a2d86c Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Wed, 6 Dec 2023 11:20:11 +0100 Subject: [PATCH 243/263] Set Serverless annotation for ESQL REST endpoint (#103006) --- x-pack/plugin/esql/build.gradle | 1 + .../java/org/elasticsearch/compute/gen/ConsumeProcessor.java | 1 + .../elasticsearch/xpack/esql/action/RestEsqlQueryAction.java | 3 +++ 3 files changed, 5 insertions(+) diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 59edbadf0b514..15df4094fdec6 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -21,6 +21,7 @@ dependencies { implementation project('compute:ann') implementation project(':libs:elasticsearch-dissect') implementation project(':libs:elasticsearch-grok') + // Also contains a dummy processor to allow compilation with unused annotations. 
annotationProcessor project('compute:gen') testImplementation project('qa:testFixtures') diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java index 73ff70df56ed3..677740862cc04 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java @@ -39,6 +39,7 @@ public Set getSupportedAnnotationTypes() { "org.elasticsearch.common.inject.Inject", "org.elasticsearch.xpack.esql.expression.function.FunctionInfo", "org.elasticsearch.xpack.esql.expression.function.Param", + "org.elasticsearch.rest.ServerlessScope", Fixed.class.getName() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 701d889391d95..7a1b7f7b9b927 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -13,6 +13,8 @@ import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestCancellableNodeClient; import org.elasticsearch.xcontent.XContentParser; @@ -24,6 +26,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; +@ServerlessScope(Scope.PUBLIC) public class RestEsqlQueryAction extends BaseRestHandler { private static final Logger LOGGER = LogManager.getLogger(RestEsqlQueryAction.class); From 
bcce4282c471df9017d83dc7d6f8562dd9c88c70 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Wed, 6 Dec 2023 12:59:20 +0200 Subject: [PATCH 244/263] Correct docs tcp retries timeout (#102968) Closes #102788 --- docs/reference/setup/sysconfig/tcpretries.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/sysconfig/tcpretries.asciidoc b/docs/reference/setup/sysconfig/tcpretries.asciidoc index 884bbc12b0ec1..1a10d9e805d0b 100644 --- a/docs/reference/setup/sysconfig/tcpretries.asciidoc +++ b/docs/reference/setup/sysconfig/tcpretries.asciidoc @@ -32,7 +32,7 @@ therefore reduce the maximum number of TCP retransmissions. You can decrease the maximum number of TCP retransmissions to `5` by running the following command as `root`. Five retransmissions corresponds with a timeout of -around six seconds. +around 13 seconds. [source,sh] ------------------------------------- From ed2f02c963e7ac655034a14a34e045694ead8aeb Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Wed, 6 Dec 2023 12:59:42 +0200 Subject: [PATCH 245/263] Reference stored scripts in snapshot restore (#102966) Closes #102903 --- .../snapshot-restore/apis/restore-snapshot-api.asciidoc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc index 974443e625de3..db8923505973b 100644 --- a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc @@ -159,6 +159,7 @@ The cluster state includes: * <> * <> * <> +* <> * For snapshots taken after 7.12.0, <> // end::cluster-state-contents[] @@ -260,12 +261,12 @@ You may want to restore an index in-place, for example when no alternative options surface after the <> API reports `no_valid_shard_copy`. 
-The following request <> `index_1` and then restores it +The following request <> `index_1` and then restores it in-place from the `snapshot_2` snapshot in the `my_repository` repository. [source,console] ---- -POST index_1/_close +POST index_1/_close POST /_snapshot/my_repository/snapshot_2/_restore?wait_for_completion=true { From 447ddf49e1f2831ccc7a3f4f97f47da041b03d53 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 6 Dec 2023 11:17:47 +0000 Subject: [PATCH 246/263] Permit S3HttpHandler#parseRequestBody to fail (#103014) This method may throw an exception if we send it bogus requests, e.g. a truncated body, but that's not a reason to fail tests. This commit removes the `AssertionError` wrapper so that the exception is handled more gracefully. Relates #102976 Closes #103012 --- .../src/main/java/fixture/s3/S3HttpHandler.java | 9 +++++++-- .../blobstore/testkit/S3SnapshotRepoTestKitIT.java | 6 ------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java index 09dfdc991b82c..336b888dd7d3c 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java @@ -21,6 +21,8 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; @@ -53,6 +55,8 @@ @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") public class S3HttpHandler implements HttpHandler { + private static final Logger logger = LogManager.getLogger(S3HttpHandler.class); + private final String bucket; private final String path; @@ -382,12 +386,13 @@ private static Tuple parseRequestBody(final 
HttpExchange } return Tuple.tuple(MessageDigests.toHexString(MessageDigests.digest(bytesReference, MessageDigests.md5())), bytesReference); } catch (Exception e) { + logger.error("exception in parseRequestBody", e); exchange.sendResponseHeaders(500, 0); try (PrintStream printStream = new PrintStream(exchange.getResponseBody())) { - printStream.println(e.toString()); + printStream.println(e); e.printStackTrace(printStream); } - throw new AssertionError("parseRequestBody failed", e); + throw e; } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java index af1af7dc53d19..dcdfc24406a2b 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/src/javaRestTest/java/org/elasticsearch/repositories/blobstore/testkit/S3SnapshotRepoTestKitIT.java @@ -61,10 +61,4 @@ protected Settings repositorySettings() { return Settings.builder().put("client", "repo_test_kit").put("bucket", bucket).put("base_path", basePath).build(); } - - @Override - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103012") - public void testRepositoryAnalysis() throws Exception { - super.testRepositoryAnalysis(); - } } From 70b15945a9dd01b35e364584428266dfb5a26ac2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Wed, 6 Dec 2023 13:28:21 +0100 Subject: [PATCH 247/263] Add ldap user metadata mappings for full name and email (#102925) * Add ldap user metadata mappings for full name and email --- docs/changelog/102925.yaml | 5 + .../settings/security-settings.asciidoc | 10 ++ .../support/LdapMetadataResolverSettings.java | 13 ++- 
.../xpack/security/authc/ldap/LdapRealm.java | 5 +- .../ldap/support/LdapMetadataResolver.java | 109 ++++++++++++++---- .../authc/ldap/support/LdapSession.java | 19 ++- .../security/authc/ldap/LdapRealmTests.java | 40 ++++++- .../support/LdapMetadataResolverTests.java | 14 +-- .../authc/ldap/support/seven-seas.ldif | 13 +++ .../org/elasticsearch/test/OpenLdapTests.java | 10 +- .../ActiveDirectorySessionFactoryTests.java | 4 +- 11 files changed, 194 insertions(+), 48 deletions(-) create mode 100644 docs/changelog/102925.yaml diff --git a/docs/changelog/102925.yaml b/docs/changelog/102925.yaml new file mode 100644 index 0000000000000..5dd15f4f60429 --- /dev/null +++ b/docs/changelog/102925.yaml @@ -0,0 +1,5 @@ +pr: 102925 +summary: Add ldap user metadata mappings for full name and email +area: Authentication +type: enhancement +issues: [] diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index f1949266f07c5..5cf55b8434f9d 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -431,6 +431,16 @@ Specifies the attribute to examine on the user for group membership. If any `group_search` settings are specified, this setting is ignored. Defaults to `memberOf`. +`user_full_name_attribute`:: +(<>) +Specifies the attribute to examine on the user for the full name of the user. +Defaults to `cn`. + +`user_email_attribute`:: +(<>) +Specifies the attribute to examine on the user for the email address of the user. +Defaults to `mail`. + `user_search.base_dn`:: (<>) Specifies a container DN to search for users. 
Required diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/support/LdapMetadataResolverSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/support/LdapMetadataResolverSettings.java index 78369ceb56d95..662a50523fe81 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/support/LdapMetadataResolverSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/ldap/support/LdapMetadataResolverSettings.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.xpack.core.security.authc.RealmSettings; -import java.util.Collections; import java.util.List; import java.util.function.Function; @@ -19,9 +18,19 @@ public final class LdapMetadataResolverSettings { key -> Setting.stringListSetting(key, Setting.Property.NodeScope) ); + public static final Function> FULL_NAME_SETTING = RealmSettings.affixSetting( + "user_full_name_attribute", + key -> Setting.simpleString(key, "cn", Setting.Property.NodeScope) + ); + + public static final Function> EMAIL_SETTING = RealmSettings.affixSetting( + "user_email_attribute", + key -> Setting.simpleString(key, "mail", Setting.Property.NodeScope) + ); + private LdapMetadataResolverSettings() {} public static List> getSettings(String type) { - return Collections.singletonList(ADDITIONAL_METADATA_SETTING.apply(type)); + return List.of(ADDITIONAL_METADATA_SETTING.apply(type), EMAIL_SETTING.apply(type), FULL_NAME_SETTING.apply(type)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java index c5894274a469c..91b49f39b4b3c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java @@ -261,10 +261,13 @@ private static void lookupUserFromSession( metadata.put("ldap_groups", ldapData.groups); metadata.putAll(ldapData.metadata); final UserData user = new UserData(username, session.userDn(), ldapData.groups, metadata, session.realm()); + roleMapper.resolveRoles(user, ActionListener.wrap(roles -> { IOUtils.close(session); String[] rolesArray = roles.toArray(new String[roles.size()]); - listener.onResponse(AuthenticationResult.success(new User(username, rolesArray, null, null, metadata, true))); + listener.onResponse( + AuthenticationResult.success(new User(username, rolesArray, ldapData.fullName, ldapData.email, metadata, true)) + ); }, onFailure)); }, onFailure)); loadingGroups = true; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java index 14f78ba82fc27..229b517be5292 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolver.java @@ -13,9 +13,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapMetadataResolverSettings; +import org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySIDUtil; import java.util.Arrays; import java.util.Collection; @@ -24,6 +27,7 @@ import java.util.Objects; import java.util.function.Function; import java.util.stream.Collectors; +import 
java.util.stream.Stream; import static org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySIDUtil.TOKEN_GROUPS; import static org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySIDUtil.convertToString; @@ -31,17 +35,36 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.searchForEntry; public class LdapMetadataResolver { - private final String[] attributeNames; private final boolean ignoreReferralErrors; + private final String fullNameAttributeName; + private final String emailAttributeName; + private final String[] allAttributeNamesToResolve; public LdapMetadataResolver(RealmConfig realmConfig, boolean ignoreReferralErrors) { - this(realmConfig.getSetting(LdapMetadataResolverSettings.ADDITIONAL_METADATA_SETTING), ignoreReferralErrors); + this( + realmConfig.getSetting(LdapMetadataResolverSettings.FULL_NAME_SETTING), + realmConfig.getSetting(LdapMetadataResolverSettings.EMAIL_SETTING), + realmConfig.getSetting(LdapMetadataResolverSettings.ADDITIONAL_METADATA_SETTING), + ignoreReferralErrors + ); } - LdapMetadataResolver(Collection attributeNames, boolean ignoreReferralErrors) { + LdapMetadataResolver( + String fullNameAttributeName, + String emailAttributeName, + Collection attributeNames, + boolean ignoreReferralErrors + ) { + this.fullNameAttributeName = fullNameAttributeName; + this.emailAttributeName = emailAttributeName; this.attributeNames = attributeNames.toArray(new String[attributeNames.size()]); this.ignoreReferralErrors = ignoreReferralErrors; + this.allAttributeNamesToResolve = Stream.concat( + Stream.of(this.attributeNames), + Stream.of(this.fullNameAttributeName, this.emailAttributeName) + ).distinct().toArray(String[]::new); + } public String[] attributeNames() { @@ -54,12 +77,12 @@ public void resolve( TimeValue timeout, Logger logger, Collection attributes, - ActionListener> listener + ActionListener listener ) { - if (this.attributeNames.length == 0) { - listener.onResponse(Map.of()); + if 
(Strings.isEmpty(this.fullNameAttributeName) && Strings.isEmpty(this.emailAttributeName) && this.attributeNames.length == 0) { + listener.onResponse(LdapMetadataResult.EMPTY); } else if (attributes != null) { - listener.onResponse(toMap(name -> findAttribute(attributes, name))); + listener.onResponse(toLdapMetadataResult(name -> findAttribute(attributes, name))); } else { searchForEntry( connection, @@ -70,12 +93,12 @@ public void resolve( ignoreReferralErrors, ActionListener.wrap((SearchResultEntry entry) -> { if (entry == null) { - listener.onResponse(Map.of()); + listener.onResponse(LdapMetadataResult.EMPTY); } else { - listener.onResponse(toMap(entry::getAttribute)); + listener.onResponse(toLdapMetadataResult(entry::getAttribute)); } }, listener::onFailure), - this.attributeNames + allAttributeNamesToResolve ); } } @@ -84,21 +107,59 @@ private static Attribute findAttribute(Collection attributes, String return attributes.stream().filter(attr -> attr.getName().equals(name)).findFirst().orElse(null); } - private Map toMap(Function attributes) { - return Arrays.stream(this.attributeNames) + public static class LdapMetadataResult { + + public static LdapMetadataResult EMPTY = new LdapMetadataResult(null, null, Map.of()); + + private final String fullName; + private final String email; + private final Map metaData; + + public LdapMetadataResult(@Nullable String fullName, @Nullable String email, Map metaData) { + this.fullName = fullName; + this.email = email; + this.metaData = metaData; + } + + @Nullable + public String getFullName() { + return fullName; + } + + @Nullable + public String getEmail() { + return email; + } + + public Map getMetaData() { + return metaData; + } + } + + private static Object parseLdapAttributeValue(Attribute attr) { + final String[] values = attr.getValues(); + if (attr.getName().equals(TOKEN_GROUPS)) { + return values.length == 1 + ? 
convertToString(attr.getValueByteArrays()[0]) + : Arrays.stream(attr.getValueByteArrays()).map(ActiveDirectorySIDUtil::convertToString).collect(Collectors.toList()); + } + return values.length == 1 ? values[0] : List.of(values); + + } + + private LdapMetadataResult toLdapMetadataResult(Function attributes) { + Attribute emailAttribute = attributes.apply(this.emailAttributeName); + Attribute fullNameAttribute = attributes.apply(this.fullNameAttributeName); + + Map metaData = Arrays.stream(this.attributeNames) .map(attributes) .filter(Objects::nonNull) - .collect(Collectors.toUnmodifiableMap(attr -> attr.getName(), attr -> { - final String[] values = attr.getValues(); - if (attr.getName().equals(TOKEN_GROUPS)) { - return values.length == 1 - ? convertToString(attr.getValueByteArrays()[0]) - : Arrays.stream(attr.getValueByteArrays()) - .map((sidBytes) -> convertToString(sidBytes)) - .collect(Collectors.toList()); - } - return values.length == 1 ? values[0] : List.of(values); - })); - } + .collect(Collectors.toUnmodifiableMap(Attribute::getName, LdapMetadataResolver::parseLdapAttributeValue)); + return new LdapMetadataResult( + fullNameAttribute == null ? null : LdapMetadataResolver.parseLdapAttributeValue(fullNameAttribute).toString(), + emailAttribute == null ? 
null : LdapMetadataResolver.parseLdapAttributeValue(emailAttribute).toString(), + metaData + ); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java index 4cc10b73421af..25592f9bdfcf4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapSession.java @@ -102,7 +102,7 @@ public void groups(ActionListener> listener) { groupsResolver.resolve(connection, userDn, timeout, logger, attributes, listener); } - public void metadata(ActionListener> listener) { + public void metadata(ActionListener listener) { metadataResolver.resolve(connection, userDn, timeout, logger, attributes, listener); } @@ -111,17 +111,28 @@ public void resolve(ActionListener listener) { groups(ActionListener.wrap(groups -> { logger.debug("Resolved {} LDAP groups [{}] for user [{}]", groups.size(), groups, userDn); metadata(ActionListener.wrap(meta -> { - logger.debug("Resolved {} meta-data fields [{}] for user [{}]", meta.size(), meta, userDn); - listener.onResponse(new LdapUserData(groups, meta)); + logger.debug( + "Resolved full name [{}], email [{}] and {} meta-data [{}] for user [{}]", + meta.getFullName(), + meta.getEmail(), + meta.getMetaData().size(), + meta, + userDn + ); + listener.onResponse(new LdapUserData(meta.getFullName(), meta.getEmail(), groups, meta.getMetaData())); }, listener::onFailure)); }, listener::onFailure)); } public static class LdapUserData { + public final String fullName; + public final String email; public final List groups; public final Map metadata; - public LdapUserData(List groups, Map metadata) { + public LdapUserData(String fullName, String email, List groups, Map metadata) { + this.fullName = fullName; + this.email = email; 
this.groups = groups; this.metadata = metadata; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java index 9bbf4dd312d27..4ed8832228e85 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; -import org.elasticsearch.xpack.core.security.authc.ldap.LdapSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapUserSearchSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.SearchGroupsResolverSettings; @@ -67,6 +66,7 @@ import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; +import static org.elasticsearch.xpack.core.security.authc.ldap.LdapSessionFactorySettings.USER_DN_TEMPLATES_SETTING; import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings.URLS_SETTING; import static org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings.VERIFICATION_MODE_SETTING_REALM; import static org.hamcrest.Matchers.arrayContaining; @@ -139,7 +139,39 @@ public void testAuthenticateSubTreeGroupSearch() throws Exception { assertThat(user.metadata(), notNullValue()); assertThat(user.metadata().get("ldap_dn"), equalTo("cn=" + VALID_USERNAME + ",ou=people,o=sevenSeas")); assertThat(user.metadata().get("ldap_groups"), instanceOf(List.class)); + 
assertThat(user.metadata().get("mail"), nullValue()); + assertThat(user.metadata().get("cn"), nullValue()); assertThat((List) user.metadata().get("ldap_groups"), contains("cn=HMS Victory,ou=crews,ou=groups,o=sevenSeas")); + assertThat(user.email(), equalTo("thardy@royalnavy.mod.uk")); + assertThat(user.fullName(), equalTo("Thomas Masterman Hardy")); + } + + public void testAuthenticateMapFullNameAndEmailMetadata() throws Exception { + String groupSearchBase = "o=sevenSeas"; + boolean misssingSetting = randomBoolean(); + Settings settings = Settings.builder() + .put(defaultGlobalSettings) + .put(buildLdapSettings(ldapUrls(), VALID_USER_TEMPLATE, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put(getFullSettingKey(REALM_IDENTIFIER, RealmSettings.ORDER_SETTING), 0) + .put( + getFullSettingKey(REALM_IDENTIFIER, LdapMetadataResolverSettings.FULL_NAME_SETTING), + misssingSetting ? "thisdoesnotexist" : "description" + ) + .put(getFullSettingKey(REALM_IDENTIFIER, LdapMetadataResolverSettings.EMAIL_SETTING), "uid") + .build(); + RealmConfig config = getRealmConfig(REALM_IDENTIFIER, settings); + SessionFactory ldapFactory = LdapRealm.sessionFactory(config, sslService, threadPool); + LdapRealm ldap = new LdapRealm(config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); + ldap.initialize(Collections.singleton(ldap), licenseState); + + PlainActionFuture> future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken("John Samuel", new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getValue(); + assertThat(user, notNullValue()); + assertThat(user.email(), equalTo("jsamuel@royalnavy.mod.uk")); + assertThat(user.fullName(), equalTo(misssingSetting ? 
null : "Clerk John Samuel")); } private RealmConfig getRealmConfig(RealmConfig.RealmIdentifier identifier, Settings settings) { @@ -327,7 +359,7 @@ public void testLdapRealmSelectsLdapSessionFactory() throws Exception { .put(defaultGlobalSettings) .putList(getFullSettingKey(identifier, URLS_SETTING), ldapUrls()) - .putList(getFullSettingKey(identifier.getName(), LdapSessionFactorySettings.USER_DN_TEMPLATES_SETTING), userTemplate) + .putList(getFullSettingKey(identifier.getName(), USER_DN_TEMPLATES_SETTING), userTemplate) .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.BASE_DN), groupSearchBase) .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.SCOPE), LdapSearchScope.SUB_TREE) .put(getFullSettingKey(identifier, VERIFICATION_MODE_SETTING_REALM), SslVerificationMode.CERTIFICATE) @@ -385,7 +417,7 @@ public void testLdapRealmThrowsExceptionForUserTemplateAndSearchSettings() throw final Settings.Builder settingsBuilder = Settings.builder() .put(defaultGlobalSettings) .putList(getFullSettingKey(identifier, URLS_SETTING), ldapUrls()) - .putList(getFullSettingKey(identifier.getName(), LdapSessionFactorySettings.USER_DN_TEMPLATES_SETTING), "cn=foo") + .putList(getFullSettingKey(identifier.getName(), USER_DN_TEMPLATES_SETTING), "cn=foo") .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.BASE_DN), "") .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.SCOPE), LdapSearchScope.SUB_TREE) .put(getFullSettingKey(identifier, VERIFICATION_MODE_SETTING_REALM), SslVerificationMode.CERTIFICATE) @@ -595,7 +627,7 @@ public void testUsageStats() throws Exception { .put(getFullSettingKey(identifier, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), PASSWORD) .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.BASE_DN), groupSearchBase) .put(getFullSettingKey(identifier, SearchGroupsResolverSettings.SCOPE), LdapSearchScope.SUB_TREE) - .put(getFullSettingKey(identifier.getName(), 
LdapSessionFactorySettings.USER_DN_TEMPLATES_SETTING), "--") + .put(getFullSettingKey(identifier.getName(), USER_DN_TEMPLATES_SETTING), "--") .put(getFullSettingKey(identifier, VERIFICATION_MODE_SETTING_REALM), SslVerificationMode.CERTIFICATE); int order = randomIntBetween(0, 10); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolverTests.java index 77be366c8b894..13e6c74616e11 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetadataResolverTests.java @@ -26,7 +26,7 @@ import java.util.Map; import static org.hamcrest.Matchers.aMapWithSize; -import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -54,11 +54,11 @@ public void testParseSettings() throws Exception { .build(); RealmConfig config = new RealmConfig(realmId, settings, TestEnvironment.newEnvironment(settings), new ThreadContext(settings)); resolver = new LdapMetadataResolver(config, false); - assertThat(resolver.attributeNames(), arrayContaining("cn", "uid")); + assertThat(resolver.attributeNames(), arrayContainingInAnyOrder("cn", "uid")); } public void testResolveSingleValuedAttributeFromCachedAttributes() throws Exception { - resolver = new LdapMetadataResolver(Arrays.asList("cn", "uid"), true); + resolver = new LdapMetadataResolver(null, null, Arrays.asList("cn", "uid"), true); final Collection attributes = Arrays.asList( new Attribute("cn", "Clint Barton"), new Attribute("uid", "hawkeye"), @@ -72,7 +72,7 @@ public void 
testResolveSingleValuedAttributeFromCachedAttributes() throws Except } public void testResolveMultiValuedAttributeFromCachedAttributes() throws Exception { - resolver = new LdapMetadataResolver(Arrays.asList("cn", "uid"), true); + resolver = new LdapMetadataResolver(null, null, Arrays.asList("cn", "uid"), true); final Collection attributes = Arrays.asList( new Attribute("cn", "Clint Barton", "hawkeye"), new Attribute("uid", "hawkeye") @@ -85,7 +85,7 @@ public void testResolveMultiValuedAttributeFromCachedAttributes() throws Excepti } public void testResolveMissingAttributeFromCachedAttributes() throws Exception { - resolver = new LdapMetadataResolver(Arrays.asList("cn", "uid"), true); + resolver = new LdapMetadataResolver(null, null, Arrays.asList("cn", "uid"), true); final Collection attributes = Collections.singletonList(new Attribute("uid", "hawkeye")); final Map map = resolve(attributes); assertThat(map, aMapWithSize(1)); @@ -94,8 +94,8 @@ public void testResolveMissingAttributeFromCachedAttributes() throws Exception { } private Map resolve(Collection attributes) throws Exception { - final PlainActionFuture> future = new PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); resolver.resolve(null, HAWKEYE_DN, TimeValue.timeValueSeconds(1), logger, attributes, future); - return future.get(); + return future.get().getMetaData(); } } diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/seven-seas.ldif b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/seven-seas.ldif index 527b5f5efae66..6eacf27c1ed71 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/seven-seas.ldif +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/seven-seas.ldif @@ -206,6 +206,19 @@ uid: jhallett mail: jhallett@royalnavy.mod.uk userpassword: pass +dn: cn=John 
Samuel,ou=people,o=sevenSeas +objectclass: person +objectclass: organizationalPerson +objectclass: inetOrgPerson +objectclass: top +cn: John Samuel +description: Clerk John Samuel +givenname: John +manager: cn=William Bligh,ou=people,o=sevenSeas +sn: Samuel +uid: jsamuel@royalnavy.mod.uk +userpassword: pass + dn: cn=HMS Bounty,ou=crews,ou=groups,o=sevenSeas objectclass: groupOfUniqueNames objectclass: top diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java index b172092cbcab4..c1375823548df 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java @@ -244,7 +244,8 @@ public void testResolveSingleValuedAttributeFromConnection() throws Exception { .putList( getFullSettingKey(realmId.getName(), LdapMetadataResolverSettings.ADDITIONAL_METADATA_SETTING.apply("ldap")), "cn", - "sn" + "sn", + "mail" ) .put(getFullSettingKey(realmId, RealmSettings.ORDER_SETTING), 0) .build(); @@ -257,9 +258,10 @@ public void testResolveSingleValuedAttributeFromConnection() throws Exception { LdapMetadataResolver resolver = new LdapMetadataResolver(config, true); try (LDAPConnection ldapConnection = setupOpenLdapConnection()) { final Map map = resolve(ldapConnection, resolver); - assertThat(map.size(), equalTo(2)); + assertThat(map.size(), equalTo(3)); assertThat(map.get("cn"), equalTo("Clint Barton")); assertThat(map.get("sn"), equalTo("Clint Barton")); + assertThat(map.get("mail"), equalTo("hawkeye@oldap.test.elasticsearch.com")); } } @@ -343,9 +345,9 @@ private LDAPConnection setupOpenLdapConnection() throws Exception { } private Map resolve(LDAPConnection connection, LdapMetadataResolver resolver) throws Exception { - final PlainActionFuture> future = new PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); 
resolver.resolve(connection, HAWKEYE_DN, TimeValue.timeValueSeconds(1), logger, null, future); - return future.get(); + return future.get().getMetaData(); } private static String getFromProperty(String port) { diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index c6644a976d08a..120a27c944bd8 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -511,14 +511,14 @@ public void testResolveTokenGroupsSID() throws Exception { .put(getFullSettingKey(REALM_ID, LdapMetadataResolverSettings.ADDITIONAL_METADATA_SETTING), "tokenGroups") .build(); RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, settings); - final PlainActionFuture> future = new PlainActionFuture<>(); + final PlainActionFuture future = new PlainActionFuture<>(); LdapMetadataResolver resolver = new LdapMetadataResolver(config, true); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "hulk"; try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { assertConnectionCanReconnect(ldap.getConnection()); resolver.resolve(ldap.getConnection(), BRUCE_BANNER_DN, TimeValue.timeValueSeconds(1), logger, null, future); - Map metadataGroupSIDs = future.get(); + Map metadataGroupSIDs = future.get().getMetaData(); assertThat(metadataGroupSIDs.size(), equalTo(1)); assertNotNull(metadataGroupSIDs.get("tokenGroups")); List SIDs = ((List) metadataGroupSIDs.get("tokenGroups")); From 794259ae82c718cd7aa2cc2453843b048fcbf8ff Mon 
Sep 17 00:00:00 2001 From: Ignacio Vera Date: Wed, 6 Dec 2023 13:46:02 +0100 Subject: [PATCH 248/263] Use LogDocMergePolicy in Nested aggregation test (#103040) --- .../bucket/nested/NestedAggregatorTests.java | 64 +++++++++++-------- .../nested/ReverseNestedAggregatorTests.java | 55 +++++++++------- 2 files changed, 70 insertions(+), 49 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 82f4597252ac9..46d3b9ee96bf6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; @@ -24,8 +25,8 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; -import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; @@ -106,7 +107,6 @@ * prefixed with the nested path: nestedPath + "." 
+ fieldName * */ -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class NestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -137,9 +137,14 @@ protected ScriptService getMockScriptService() { return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS, () -> 1L); } + private static RandomIndexWriter newRandomIndexWriter(Directory directory) throws IOException { + final IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy()); + return new RandomIndexWriter(random(), directory, conf); + } + public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { // intentionally not writing any docs } try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { @@ -166,7 +171,7 @@ public void testSingleNestingMax() throws IOException { int expectedNestedDocs = 0; double expectedMaxValue = Double.NEGATIVE_INFINITY; try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numRootDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -215,7 +220,7 @@ public void testDoubleNestingMax() throws IOException { int expectedNestedDocs = 0; double expectedMaxValue = Double.NEGATIVE_INFINITY; try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numRootDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -265,7 +270,7 @@ 
public void testOrphanedDocs() throws IOException { int expectedNestedDocs = 0; double expectedSum = 0; try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numRootDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -306,7 +311,7 @@ public void testOrphanedDocs() throws IOException { } public void testResetRootDocId() throws Exception { - IndexWriterConfig iwc = new IndexWriterConfig(null); + IndexWriterConfig iwc = new IndexWriterConfig(null).setMergePolicy(new LogDocMergePolicy()); iwc.setMergePolicy(NoMergePolicy.INSTANCE); SeqNoFieldMapper.SequenceIDFields sequenceIDFields = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); try (Directory directory = newDirectory()) { @@ -389,7 +394,7 @@ public void testResetRootDocId() throws Exception { public void testNestedOrdering() throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { iw.addDocuments(generateBook("1", new String[] { "a" }, new int[] { 12, 13, 14 })); iw.addDocuments(generateBook("2", new String[] { "b" }, new int[] { 5, 50 })); iw.addDocuments(generateBook("3", new String[] { "c" }, new int[] { 39, 19 })); @@ -516,7 +521,7 @@ public void testNestedOrdering_random() throws IOException { books.add(Tuple.tuple(Strings.format("%03d", i), chapters)); } try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { int id = 0; for (Tuple book : books) { iw.addDocuments(generateBook(Strings.format("%03d", id), new String[] { book.v1() }, book.v2())); @@ -566,7 +571,7 @@ public void testNestedOrdering_random() throws IOException { public void 
testPreGetChildLeafCollectors() throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { List> documents = new ArrayList<>(); LuceneDocument document = new LuceneDocument(); document.add(new StringField(IdFieldMapper.NAME, Uid.encodeId("1"), Field.Store.NO)); @@ -684,7 +689,7 @@ public void testFieldAlias() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numRootDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -725,7 +730,7 @@ public void testNestedWithPipeline() throws IOException { int expectedNestedDocs = 0; double expectedMaxValue = Double.NEGATIVE_INFINITY; try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numRootDocs; i++) { List> documents = new ArrayList<>(); expectedMaxValue = Math.max(expectedMaxValue, generateMaxDocs(documents, 1, i, NESTED_OBJECT, VALUE_FIELD_NAME)); @@ -790,24 +795,29 @@ public void testNestedUnderTerms() throws IOException { new TermsAggregationBuilder("resellers").field("reseller_id").size(numResellers) ) ); - testCase(buildResellerData(numProducts, numResellers), result -> { - LongTerms products = (LongTerms) result; - assertThat( - products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) - ); - for (int p = 0; p < numProducts; p++) { - LongTerms.Bucket bucket = 
products.getBucketByKey(Integer.toString(p)); - assertThat(bucket.getDocCount(), equalTo(1L)); - InternalNested nested = bucket.getAggregations().get("nested"); - assertThat(nested.getDocCount(), equalTo((long) numResellers)); - LongTerms resellers = nested.getAggregations().get("resellers"); + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { + buildResellerData(numProducts, numResellers).accept(iw); + } + try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { + LongTerms products = searchAndReduce(indexReader, new AggTestConfig(b, resellersMappedFields())); assertThat( - resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) + products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) ); + for (int p = 0; p < numProducts; p++) { + LongTerms.Bucket bucket = products.getBucketByKey(Integer.toString(p)); + assertThat(bucket.getDocCount(), equalTo(1L)); + InternalNested nested = bucket.getAggregations().get("nested"); + assertThat(nested.getDocCount(), equalTo((long) numResellers)); + LongTerms resellers = nested.getAggregations().get("resellers"); + assertThat( + resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) + ); + } } - }, new AggTestConfig(b, resellersMappedFields())); + } } public static CheckedConsumer buildResellerData(int numProducts, int numResellers) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 
f6be5c2171193..226b3b39c5a4a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -13,10 +13,12 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; @@ -44,7 +46,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested; import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102974") public class ReverseNestedAggregatorTests extends AggregatorTestCase { private static final String VALUE_FIELD_NAME = "number"; @@ -61,9 +62,14 @@ protected DirectoryReader wrapDirectoryReader(DirectoryReader reader) throws IOE return wrapInMockESDirectoryReader(reader); } + private static RandomIndexWriter newRandomIndexWriter(Directory directory) throws IOException { + final IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy()); + return new RandomIndexWriter(random(), directory, conf); + } + public void testNoDocs() throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { // 
intentionally not writing any docs } try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { @@ -92,7 +98,7 @@ public void testMaxFromParentDocs() throws IOException { int expectedNestedDocs = 0; double expectedMaxValue = Double.NEGATIVE_INFINITY; try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numParentDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -148,7 +154,7 @@ public void testFieldAlias() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); try (Directory directory = newDirectory()) { - try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { for (int i = 0; i < numParentDocs; i++) { List> documents = new ArrayList<>(); int numNestedDocs = randomIntBetween(0, 20); @@ -211,26 +217,31 @@ public void testNestedUnderTerms() throws IOException { ) ) ); - testCase(NestedAggregatorTests.buildResellerData(numProducts, numResellers), result -> { - InternalNested nested = (InternalNested) result; - assertThat(nested.getDocCount(), equalTo((long) numProducts * numResellers)); - LongTerms resellers = nested.getAggregations().get("resellers"); - assertThat( - resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) - ); - for (int r = 0; r < numResellers; r++) { - LongTerms.Bucket bucket = resellers.getBucketByKey(Integer.toString(r)); - assertThat(bucket.getDocCount(), equalTo((long) numProducts)); - InternalReverseNested reverseNested = bucket.getAggregations().get("reverse_nested"); - assertThat(reverseNested.getDocCount(), equalTo((long) 
numProducts)); - LongTerms products = reverseNested.getAggregations().get("products"); + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter iw = newRandomIndexWriter(directory)) { + NestedAggregatorTests.buildResellerData(numProducts, numResellers).accept(iw); + } + try (DirectoryReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { + InternalNested nested = searchAndReduce(indexReader, new AggTestConfig(b, NestedAggregatorTests.resellersMappedFields())); + LongTerms resellers = nested.getAggregations().get("resellers"); assertThat( - products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), - equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) + resellers.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numResellers).mapToObj(Long::valueOf).collect(toList())) ); + for (int r = 0; r < numResellers; r++) { + LongTerms.Bucket bucket = resellers.getBucketByKey(Integer.toString(r)); + assertThat(bucket.getDocCount(), equalTo((long) numProducts)); + InternalReverseNested reverseNested = bucket.getAggregations().get("reverse_nested"); + assertThat(reverseNested.getDocCount(), equalTo((long) numProducts)); + LongTerms products = reverseNested.getAggregations().get("products"); + assertThat( + products.getBuckets().stream().map(LongTerms.Bucket::getKeyAsNumber).collect(toList()), + equalTo(LongStream.range(0, numProducts).mapToObj(Long::valueOf).collect(toList())) + ); + } } - }, new AggTestConfig(b, NestedAggregatorTests.resellersMappedFields())); + } } @Override From 6e248a868e33f5f5d1cac33fdc514877a2ac7ae8 Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Wed, 6 Dec 2023 15:26:36 +0200 Subject: [PATCH 249/263] Fix Windows tcpretries as well (#103048) In previous PR #102968 fixed the Linux value but not the Windows one. 
Relates #102788 --- docs/reference/setup/sysconfig/tcpretries.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/sysconfig/tcpretries.asciidoc b/docs/reference/setup/sysconfig/tcpretries.asciidoc index 1a10d9e805d0b..295938edf7e82 100644 --- a/docs/reference/setup/sysconfig/tcpretries.asciidoc +++ b/docs/reference/setup/sysconfig/tcpretries.asciidoc @@ -17,7 +17,7 @@ Most Linux distributions default to retransmitting any lost packets 15 times. Retransmissions back off exponentially, so these 15 retransmissions take over 900 seconds to complete. This means it takes Linux many minutes to detect a network partition or a failed node with this method. Windows defaults to just 5 -retransmissions which corresponds with a timeout of around 6 seconds. +retransmissions which corresponds with a timeout of around 13 seconds. The Linux default allows for communication over networks that may experience very long periods of packet loss, but this default is excessive and even harmful From 0602eddd3b3c7d5ee7625fd58f2270603c31c8ff Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 6 Dec 2023 15:28:36 +0100 Subject: [PATCH 250/263] Restore CacheFileRegion refcounting for writes (#102843) In #98241 we removed the refcounting around write handler in SharedBytes.IO. But recently we saw wrong bytes being read from the snapshot file under heavy evictions and investigation shows that the bytes belonged to another cached file. 
Low level logging (hard to reproduce) shows that writes and reads using the same SharedBytes.IO instance but for different cache file region could be interleaved, so that bytes in shared cache could be overwritten and the last read would read (and store in internal index input buffers) bytes from a different file: Thread[elasticsearch[node_t0][stateless_shard][T#4],5,TGRP-IndexCorruptionIT]: 10485760 bytes written using SharedBytes$IO@dc07632 (230716978) for FileCacheKey[shardId=[index-0][0], primaryTerm=1, fileName=stateless_commit_26] Thread[elasticsearch[node_t0][stateless_shard][T#3],5,TGRP-IndexCorruptionIT]: 10485760 bytes written using SharedBytes$IO@dc07632 (230716978) for FileCacheKey[shardId=[index-0][0], primaryTerm=1, fileName=stateless_commit_16] Thread[elasticsearch[node_t0][stateless_shard][T#4],5,TGRP-IndexCorruptionIT]: 375 bytes read using SharedBytes$IO@dc07632 (230716978) for key FileCacheKey[shardId=[index-0][0], primaryTerm=1, fileName=stateless_commit_26] This change fixes resfcounting around the write handler so that the IO instance is decref after bytes are fully written. 
Relates #98241 --- docs/changelog/102843.yaml | 5 +++ .../shared/SharedBlobCacheService.java | 32 +++++++++++-------- 2 files changed, 24 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/102843.yaml diff --git a/docs/changelog/102843.yaml b/docs/changelog/102843.yaml new file mode 100644 index 0000000000000..7e561fa7cc582 --- /dev/null +++ b/docs/changelog/102843.yaml @@ -0,0 +1,5 @@ +pr: 102843 +summary: Restore `SharedBytes.IO` refcounting on reads & writes +area: Snapshot/Restore +type: bug +issues: [] diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 695e96850e8e1..9867c81808d24 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -708,25 +708,31 @@ void populateAndRead( } private void fillGaps(Executor executor, RangeMissingHandler writer, List gaps) { + final var cacheFileRegion = CacheFileRegion.this; for (SparseFileTracker.Gap gap : gaps) { executor.execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { - assert CacheFileRegion.this.hasReferences(); ensureOpen(); - final int start = Math.toIntExact(gap.start()); - var ioRef = io; - assert regionOwners.get(ioRef) == CacheFileRegion.this; - writer.fillCacheRange( - ioRef, - start, - start, - Math.toIntExact(gap.end() - start), - progress -> gap.onProgress(start + progress) - ); - writeCount.increment(); - + if (cacheFileRegion.tryIncRef() == false) { + throw new AlreadyClosedException("File chunk [" + cacheFileRegion.regionKey + "] has been released"); + } + try { + final int start = Math.toIntExact(gap.start()); + var ioRef = io; + assert regionOwners.get(ioRef) == cacheFileRegion; + writer.fillCacheRange( + ioRef, + 
start, + start, + Math.toIntExact(gap.end() - start), + progress -> gap.onProgress(start + progress) + ); + writeCount.increment(); + } finally { + cacheFileRegion.decRef(); + } gap.onCompletion(); } From 9883f585acf4a109f506101e131e0c5f7460fe91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Wed, 6 Dec 2023 15:32:09 +0100 Subject: [PATCH 251/263] Fix SLM detection in tests (#103053) --- .../org/elasticsearch/test/rest/RestTestLegacyFeatures.java | 5 ++++- .../elasticsearch/xpack/restart/FullClusterRestartIT.java | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index 1530809a064b1..aedd916c0a0f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -59,6 +59,8 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature ML_INDICES_HIDDEN = new NodeFeature("ml.indices_hidden"); @UpdateForV9 public static final NodeFeature ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); + @UpdateForV9 + public static final NodeFeature SLM_SUPPORTED = new NodeFeature("slm.supported"); @Override public Map getHistoricalFeatures() { @@ -78,7 +80,8 @@ public Map getHistoricalFeatures() { entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), entry(DATA_STREAMS_DATE_IN_INDEX_NAME, Version.V_7_11_0), entry(ML_INDICES_HIDDEN, Version.V_7_7_0), - entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0) + entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0), + entry(SLM_SUPPORTED, Version.V_7_4_0) ); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java 
b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 2ad66f071d784..bd422c0c578d8 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -594,7 +594,7 @@ public void testSlmPolicyAndStats() throws IOException { Collections.singletonMap("indices", Collections.singletonList("*")), null ); - if (isRunningAgainstOldCluster() && has(ProductFeature.SLM)) { + if (isRunningAgainstOldCluster() && clusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED)) { Request createRepoRequest = new Request("PUT", "_snapshot/test-repo"); String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}"; createRepoRequest.setJsonEntity(repoCreateJson); @@ -608,7 +608,7 @@ public void testSlmPolicyAndStats() throws IOException { client().performRequest(createSlmPolicyRequest); } - if (isRunningAgainstOldCluster() == false && has(ProductFeature.SLM)) { + if (isRunningAgainstOldCluster() == false && clusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED)) { Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy"); Response response = client().performRequest(getSlmPolicyRequest); Map responseMap = entityAsMap(response); From b03c4ab952c78dd925f96ff39895e2fbb07d1382 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 6 Dec 2023 15:44:41 +0100 Subject: [PATCH 252/263] Fix SearchResponseMerger ref counting (#103021) SearchResponseMerger holds references to SearchResponse instances. This commit makes it releasable to enable us to safely hold a reference to each of the SearchResponse that it will merge and safely release it in both merge and error cases. 
--- .../action/search/MultiSearchResponse.java | 7 +- .../action/search/SearchResponseMerger.java | 16 +- .../search/TransportMultiSearchAction.java | 3 +- .../action/search/TransportSearchAction.java | 22 +- .../search/SearchResponseMergerTests.java | 1451 +++++++++-------- .../search/TransportSearchActionTests.java | 65 +- .../geo/BasePointShapeQueryTestCase.java | 3 +- 7 files changed, 885 insertions(+), 682 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java index aee631fb5d4cf..3cc3370edfe54 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java @@ -61,7 +61,12 @@ public static class Item implements Writeable, ChunkedToXContent { private final SearchResponse response; private final Exception exception; - public Item(SearchResponse response, Exception exception) { + /** + * + * @param response search response that is considered owned by this instance after this constructor returns or {@code null} + * @param exception exception in case of search failure + */ + public Item(@Nullable SearchResponse response, @Nullable Exception exception) { this.response = response; this.exception = exception; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 1faa57cbfcd60..b6143cfc51c3a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.search.SearchResponse.Clusters; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import 
org.elasticsearch.core.Releasable; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.lucene.grouping.TopFieldGroups; import org.elasticsearch.search.SearchHit; @@ -31,6 +32,7 @@ import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; +import org.elasticsearch.transport.LeakTracker; import java.util.ArrayList; import java.util.Arrays; @@ -64,7 +66,7 @@ // TODO it may make sense to integrate the remote clusters responses as a shard response in the initial search phase and ignore hits coming // from the remote clusters in the fetch phase. This would be identical to the removed QueryAndFetch strategy except that only the remote // cluster response would have the fetch results. -final class SearchResponseMerger { +final class SearchResponseMerger implements Releasable { final int from; final int size; final int trackTotalHitsUpTo; @@ -72,6 +74,12 @@ final class SearchResponseMerger { private final AggregationReduceContext.Builder aggReduceContextBuilder; private final List searchResponses = new CopyOnWriteArrayList<>(); + private final Releasable releasable = LeakTracker.wrap(() -> { + for (SearchResponse searchResponse : searchResponses) { + searchResponse.decRef(); + } + }); + SearchResponseMerger( int from, int size, @@ -93,6 +101,7 @@ final class SearchResponseMerger { */ void add(SearchResponse searchResponse) { assert searchResponse.getScrollId() == null : "merging scroll results is not supported"; + searchResponse.mustIncRef(); searchResponses.add(searchResponse); } @@ -383,6 +392,11 @@ private static SearchHits topDocsToSearchHits(TopDocs topDocs, TopDocsStats topD return new SearchHits(searchHits, topDocsStats.getTotalHits(), topDocsStats.getMaxScore(), sortFields, groupField, groupValues); } + @Override + public void close() { + releasable.close(); + } + private static final class FieldDocAndSearchHit extends 
FieldDoc { private final SearchHit searchHit; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 1fc9bca607285..c81f3c3dc24c6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -154,9 +154,10 @@ void executeSearch( * when we handle the response rather than going recursive, we fork to another thread, otherwise we recurse. */ final Thread thread = Thread.currentThread(); - client.search(request.request, new ActionListener() { + client.search(request.request, new ActionListener<>() { @Override public void onResponse(final SearchResponse searchResponse) { + searchResponse.mustIncRef(); // acquire reference on behalf of MultiSearchResponse.Item below handleResponse(request.responseSlot, new MultiSearchResponse.Item(searchResponse, null)); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index f164e3342fb60..6045a9ff5efa3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -765,7 +765,14 @@ private static ActionListener createCCSListener( SearchResponse.Clusters clusters, ActionListener originalListener ) { - return new CCSActionListener<>(clusterAlias, skipUnavailable, countDown, exceptions, clusters, originalListener) { + return new CCSActionListener<>( + clusterAlias, + skipUnavailable, + countDown, + exceptions, + clusters, + ActionListener.releaseAfter(originalListener, searchResponseMerger) + ) { @Override void innerOnResponse(SearchResponse searchResponse) { // TODO: in CCS fail fast ticket we may need to fail the query if the cluster 
gets marked as FAILED @@ -777,6 +784,11 @@ void innerOnResponse(SearchResponse searchResponse) { SearchResponse createFinalResponse() { return searchResponseMerger.getMergedResponse(clusters); } + + @Override + protected void releaseResponse(SearchResponse searchResponse) { + searchResponse.decRef(); + } }; } @@ -1493,13 +1505,19 @@ private void maybeFinish() { originalListener.onFailure(e); return; } - originalListener.onResponse(response); + try { + originalListener.onResponse(response); + } finally { + releaseResponse(response); + } } else { originalListener.onFailure(exceptions.get()); } } } + protected void releaseResponse(FinalResponse response) {} + abstract FinalResponse createFinalResponse(); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index dc6e69b15ee32..e57b204df0836 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -98,367 +98,446 @@ private void awaitResponsesAdded() throws InterruptedException { public void testMergeTookInMillis() throws InterruptedException { long currentRelativeTime = randomNonNegativeLong(); SearchTimeProvider timeProvider = new SearchTimeProvider(randomLong(), 0, () -> currentRelativeTime); - SearchResponseMerger merger = new SearchResponseMerger( - randomIntBetween(0, 1000), - randomIntBetween(0, 10000), - SearchContext.TRACK_TOTAL_HITS_ACCURATE, - timeProvider, - emptyReduceContextBuilder() - ); - for (int i = 0; i < numResponses; i++) { - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 1, - 1, - 0, - randomNonNegativeLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponseTests.randomClusters() - ); - addResponse(merger, searchResponse); + try ( + SearchResponseMerger merger = new 
SearchResponseMerger( + randomIntBetween(0, 1000), + randomIntBetween(0, 10000), + SearchContext.TRACK_TOTAL_HITS_ACCURATE, + timeProvider, + emptyReduceContextBuilder() + ) + ) { + for (int i = 0; i < numResponses; i++) { + SearchResponse searchResponse = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 1, + 1, + 0, + randomNonNegativeLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponseTests.randomClusters() + ); + try { + addResponse(merger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + SearchResponse searchResponse = merger.getMergedResponse(SearchResponse.Clusters.EMPTY); + try { + assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); + } finally { + searchResponse.decRef(); + } } - awaitResponsesAdded(); - SearchResponse searchResponse = merger.getMergedResponse(SearchResponse.Clusters.EMPTY); - assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); } public void testMergeShardFailures() throws InterruptedException { SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0); - SearchResponseMerger merger = new SearchResponseMerger( - 0, - 0, - SearchContext.TRACK_TOTAL_HITS_ACCURATE, - searchTimeProvider, - emptyReduceContextBuilder() - ); - PriorityQueue> priorityQueue = new PriorityQueue<>( - Comparator.comparing(Tuple::v1, (o1, o2) -> { - int compareTo = o1.getShardId().compareTo(o2.getShardId()); - if (compareTo != 0) { - return compareTo; - } - return o1.getClusterAlias().compareTo(o2.getClusterAlias()); - }) - ); - int numIndices = numResponses * randomIntBetween(1, 3); - Iterator> indicesPerCluster = randomRealisticIndices(numIndices, numResponses).entrySet().iterator(); - for (int i = 0; i < numResponses; i++) { - Map.Entry entry = indicesPerCluster.next(); - String clusterAlias = entry.getKey(); - Index[] indices = entry.getValue(); - int numFailures = 
randomIntBetween(1, 10); - ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; - for (int j = 0; j < numFailures; j++) { - ShardId shardId = new ShardId(randomFrom(indices), j); - SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias); - ShardSearchFailure failure = new ShardSearchFailure(new IllegalArgumentException(), searchShardTarget); - shardSearchFailures[j] = failure; - priorityQueue.add(Tuple.tuple(searchShardTarget, failure)); - } - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 1, - 1, + try ( + SearchResponseMerger merger = new SearchResponseMerger( + 0, 0, - 100L, - shardSearchFailures, - SearchResponse.Clusters.EMPTY + SearchContext.TRACK_TOTAL_HITS_ACCURATE, + searchTimeProvider, + emptyReduceContextBuilder() + ) + ) { + PriorityQueue> priorityQueue = new PriorityQueue<>( + Comparator.comparing(Tuple::v1, (o1, o2) -> { + int compareTo = o1.getShardId().compareTo(o2.getShardId()); + if (compareTo != 0) { + return compareTo; + } + return o1.getClusterAlias().compareTo(o2.getClusterAlias()); + }) ); - addResponse(merger, searchResponse); - } - awaitResponsesAdded(); - assertEquals(numResponses, merger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(priorityQueue.size(), mergedResponse.getFailedShards()); - ShardSearchFailure[] shardFailures = mergedResponse.getShardFailures(); - assertEquals(priorityQueue.size(), shardFailures.length); - for (ShardSearchFailure shardFailure : shardFailures) { - ShardSearchFailure expected = priorityQueue.poll().v2(); - 
assertSame(expected, shardFailure); + int numIndices = numResponses * randomIntBetween(1, 3); + Iterator> indicesPerCluster = randomRealisticIndices(numIndices, numResponses).entrySet().iterator(); + for (int i = 0; i < numResponses; i++) { + Map.Entry entry = indicesPerCluster.next(); + String clusterAlias = entry.getKey(); + Index[] indices = entry.getValue(); + int numFailures = randomIntBetween(1, 10); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; + for (int j = 0; j < numFailures; j++) { + ShardId shardId = new ShardId(randomFrom(indices), j); + SearchShardTarget searchShardTarget = new SearchShardTarget(randomAlphaOfLength(6), shardId, clusterAlias); + ShardSearchFailure failure = new ShardSearchFailure(new IllegalArgumentException(), searchShardTarget); + shardSearchFailures[j] = failure; + priorityQueue.add(Tuple.tuple(searchShardTarget, failure)); + } + SearchResponse searchResponse = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 1, + 1, + 0, + 100L, + shardSearchFailures, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(merger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, merger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = merger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(priorityQueue.size(), mergedResponse.getFailedShards()); + ShardSearchFailure[] shardFailures = mergedResponse.getShardFailures(); + assertEquals(priorityQueue.size(), shardFailures.length); + for (ShardSearchFailure shardFailure : shardFailures) { + ShardSearchFailure expected = priorityQueue.poll().v2(); + 
assertSame(expected, shardFailure); + } + } finally { + mergedResponse.decRef(); + } } } public void testMergeShardFailuresNullShardTarget() throws InterruptedException { SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0); - SearchResponseMerger merger = new SearchResponseMerger( - 0, - 0, - SearchContext.TRACK_TOTAL_HITS_ACCURATE, - searchTimeProvider, - emptyReduceContextBuilder() - ); - PriorityQueue> priorityQueue = new PriorityQueue<>(Comparator.comparing(Tuple::v1)); - for (int i = 0; i < numResponses; i++) { - int numFailures = randomIntBetween(1, 10); - ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; - for (int j = 0; j < numFailures; j++) { - String index = "index-" + i; - ShardId shardId = new ShardId(index, index + "-uuid", j); - ElasticsearchException elasticsearchException = new ElasticsearchException(new IllegalArgumentException()); - elasticsearchException.setShard(shardId); - ShardSearchFailure failure = new ShardSearchFailure(elasticsearchException); - shardSearchFailures[j] = failure; - priorityQueue.add(Tuple.tuple(shardId, failure)); - } - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 1, - 1, + try ( + SearchResponseMerger merger = new SearchResponseMerger( 0, - 100L, - shardSearchFailures, - SearchResponse.Clusters.EMPTY - ); - addResponse(merger, searchResponse); - } - awaitResponsesAdded(); - assertEquals(numResponses, merger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(priorityQueue.size(), mergedResponse.getFailedShards()); - ShardSearchFailure[] 
shardFailures = mergedResponse.getShardFailures(); - assertEquals(priorityQueue.size(), shardFailures.length); - for (ShardSearchFailure shardFailure : shardFailures) { - ShardSearchFailure expected = priorityQueue.poll().v2(); - assertSame(expected, shardFailure); + 0, + SearchContext.TRACK_TOTAL_HITS_ACCURATE, + searchTimeProvider, + emptyReduceContextBuilder() + ) + ) { + PriorityQueue> priorityQueue = new PriorityQueue<>(Comparator.comparing(Tuple::v1)); + for (int i = 0; i < numResponses; i++) { + int numFailures = randomIntBetween(1, 10); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; + for (int j = 0; j < numFailures; j++) { + String index = "index-" + i; + ShardId shardId = new ShardId(index, index + "-uuid", j); + ElasticsearchException elasticsearchException = new ElasticsearchException(new IllegalArgumentException()); + elasticsearchException.setShard(shardId); + ShardSearchFailure failure = new ShardSearchFailure(elasticsearchException); + shardSearchFailures[j] = failure; + priorityQueue.add(Tuple.tuple(shardId, failure)); + } + SearchResponse searchResponse = new SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 1, + 1, + 0, + 100L, + shardSearchFailures, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(merger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, merger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = merger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(priorityQueue.size(), mergedResponse.getFailedShards()); + ShardSearchFailure[] shardFailures = mergedResponse.getShardFailures(); + 
assertEquals(priorityQueue.size(), shardFailures.length); + for (ShardSearchFailure shardFailure : shardFailures) { + ShardSearchFailure expected = priorityQueue.poll().v2(); + assertSame(expected, shardFailure); + } + } finally { + mergedResponse.decRef(); + } } } public void testMergeShardFailuresNullShardId() throws InterruptedException { SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0); - SearchResponseMerger merger = new SearchResponseMerger( - 0, - 0, - SearchContext.TRACK_TOTAL_HITS_ACCURATE, - searchTimeProvider, - emptyReduceContextBuilder() - ); - List expectedFailures = new ArrayList<>(); - for (int i = 0; i < numResponses; i++) { - int numFailures = randomIntBetween(1, 50); - ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; - for (int j = 0; j < numFailures; j++) { - ShardSearchFailure shardSearchFailure = new ShardSearchFailure(new ElasticsearchException(new IllegalArgumentException())); - shardSearchFailures[j] = shardSearchFailure; - expectedFailures.add(shardSearchFailure); - } - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 1, - 1, + try ( + SearchResponseMerger merger = new SearchResponseMerger( 0, - 100L, - shardSearchFailures, - SearchResponse.Clusters.EMPTY - ); - addResponse(merger, searchResponse); + 0, + SearchContext.TRACK_TOTAL_HITS_ACCURATE, + searchTimeProvider, + emptyReduceContextBuilder() + ) + ) { + List expectedFailures = new ArrayList<>(); + for (int i = 0; i < numResponses; i++) { + int numFailures = randomIntBetween(1, 50); + ShardSearchFailure[] shardSearchFailures = new ShardSearchFailure[numFailures]; + for (int j = 0; j < numFailures; j++) { + ShardSearchFailure shardSearchFailure = new ShardSearchFailure( + new ElasticsearchException(new IllegalArgumentException()) + ); + shardSearchFailures[j] = shardSearchFailure; + expectedFailures.add(shardSearchFailure); + } + SearchResponse searchResponse = new 
SearchResponse( + InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + null, + 1, + 1, + 0, + 100L, + shardSearchFailures, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(merger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, merger.numResponses()); + var mergedResponse = merger.getMergedResponse(SearchResponse.Clusters.EMPTY); + try { + ShardSearchFailure[] shardFailures = mergedResponse.getShardFailures(); + assertThat(Arrays.asList(shardFailures), containsInAnyOrder(expectedFailures.toArray(ShardSearchFailure.EMPTY_ARRAY))); + } finally { + mergedResponse.decRef(); + } } - awaitResponsesAdded(); - assertEquals(numResponses, merger.numResponses()); - ShardSearchFailure[] shardFailures = merger.getMergedResponse(SearchResponse.Clusters.EMPTY).getShardFailures(); - assertThat(Arrays.asList(shardFailures), containsInAnyOrder(expectedFailures.toArray(ShardSearchFailure.EMPTY_ARRAY))); } public void testMergeProfileResults() throws InterruptedException { SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0); - SearchResponseMerger merger = new SearchResponseMerger( - 0, - 0, - SearchContext.TRACK_TOTAL_HITS_ACCURATE, - searchTimeProvider, - emptyReduceContextBuilder() - ); - Map expectedProfile = new HashMap<>(); - for (int i = 0; i < numResponses; i++) { - SearchProfileResults profile = SearchProfileResultsTests.createTestItem(); - expectedProfile.putAll(profile.getShardResults()); - SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - 1, - 1, + try ( + SearchResponseMerger merger = new SearchResponseMerger( 0, - 100L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - 
addResponse(merger, searchResponse); + 0, + SearchContext.TRACK_TOTAL_HITS_ACCURATE, + searchTimeProvider, + emptyReduceContextBuilder() + ) + ) { + Map expectedProfile = new HashMap<>(); + for (int i = 0; i < numResponses; i++) { + SearchProfileResults profile = SearchProfileResultsTests.createTestItem(); + expectedProfile.putAll(profile.getShardResults()); + SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + 1, + 1, + 0, + 100L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(merger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, merger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = merger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(0, mergedResponse.getFailedShards()); + assertEquals(0, mergedResponse.getShardFailures().length); + assertEquals(expectedProfile, mergedResponse.getProfileResults()); + } finally { + mergedResponse.decRef(); + } } - awaitResponsesAdded(); - assertEquals(numResponses, merger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - 
assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(0, mergedResponse.getFailedShards()); - assertEquals(0, mergedResponse.getShardFailures().length); - assertEquals(expectedProfile, mergedResponse.getProfileResults()); } public void testMergeCompletionSuggestions() throws InterruptedException { String suggestionName = randomAlphaOfLengthBetween(4, 8); int size = randomIntBetween(1, 100); - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - 0, - 0, - 0, - new SearchTimeProvider(0, 0, () -> 0), - emptyReduceContextBuilder() - ); - for (int i = 0; i < numResponses; i++) { - List>> suggestions = - new ArrayList<>(); - CompletionSuggestion completionSuggestion = new CompletionSuggestion(suggestionName, size, false); - CompletionSuggestion.Entry options = new CompletionSuggestion.Entry(new Text("suggest"), 0, 10); - int docId = randomIntBetween(0, Integer.MAX_VALUE); - CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( - docId, - new Text(randomAlphaOfLengthBetween(5, 10)), - i, - Collections.emptyMap() - ); - SearchHit hit = new SearchHit(docId); - ShardId shardId = new ShardId( - randomAlphaOfLengthBetween(5, 10), - randomAlphaOfLength(10), - randomIntBetween(0, Integer.MAX_VALUE) - ); - String clusterAlias = randomBoolean() ? 
"" : randomAlphaOfLengthBetween(5, 10); - hit.shard(new SearchShardTarget("node", shardId, clusterAlias)); - option.setHit(hit); - options.addOption(option); - completionSuggestion.addTerm(options); - suggestions.add(completionSuggestion); - Suggest suggest = new Suggest(suggestions); - SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - 1, - 1, + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( 0, - randomLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - addResponse(searchResponseMerger, searchResponse); - } - awaitResponsesAdded(); - assertEquals(numResponses, searchResponseMerger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(0, mergedResponse.getFailedShards()); - assertEquals(0, mergedResponse.getShardFailures().length); - Suggest.Suggestion> suggestion = mergedResponse - .getSuggest() - .getSuggestion(suggestionName); - assertEquals(1, suggestion.getEntries().size()); - Suggest.Suggestion.Entry options = suggestion.getEntries().get(0); - assertEquals(Math.min(numResponses, size), options.getOptions().size()); - int i = numResponses; - for (Suggest.Suggestion.Entry.Option option : options) { - assertEquals(--i, option.getScore(), 0f); + 0, + 0, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder() + ) + ) { + for (int i = 0; i < numResponses; i++) { + List>> suggestions = + 
new ArrayList<>(); + CompletionSuggestion completionSuggestion = new CompletionSuggestion(suggestionName, size, false); + CompletionSuggestion.Entry options = new CompletionSuggestion.Entry(new Text("suggest"), 0, 10); + int docId = randomIntBetween(0, Integer.MAX_VALUE); + CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( + docId, + new Text(randomAlphaOfLengthBetween(5, 10)), + i, + Collections.emptyMap() + ); + SearchHit hit = new SearchHit(docId); + ShardId shardId = new ShardId( + randomAlphaOfLengthBetween(5, 10), + randomAlphaOfLength(10), + randomIntBetween(0, Integer.MAX_VALUE) + ); + String clusterAlias = randomBoolean() ? "" : randomAlphaOfLengthBetween(5, 10); + hit.shard(new SearchShardTarget("node", shardId, clusterAlias)); + option.setHit(hit); + options.addOption(option); + completionSuggestion.addTerm(options); + suggestions.add(completionSuggestion); + Suggest suggest = new Suggest(suggestions); + SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + 1, + 1, + 0, + randomLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(searchResponseMerger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, searchResponseMerger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(0, 
mergedResponse.getFailedShards()); + assertEquals(0, mergedResponse.getShardFailures().length); + Suggest.Suggestion> suggestion = + mergedResponse.getSuggest().getSuggestion(suggestionName); + assertEquals(1, suggestion.getEntries().size()); + Suggest.Suggestion.Entry options = suggestion.getEntries().get(0); + assertEquals(Math.min(numResponses, size), options.getOptions().size()); + int i = numResponses; + for (Suggest.Suggestion.Entry.Option option : options) { + assertEquals(--i, option.getScore(), 0f); + } + } finally { + mergedResponse.decRef(); + } } } public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException { String suggestionName = randomAlphaOfLengthBetween(4, 8); int size = randomIntBetween(1, 100); - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - 0, - 0, - 0, - new SearchTimeProvider(0, 0, () -> 0), - emptyReduceContextBuilder() - ); - for (int i = 0; i < numResponses; i++) { - List>> suggestions = - new ArrayList<>(); - CompletionSuggestion completionSuggestion = new CompletionSuggestion(suggestionName, size, false); - CompletionSuggestion.Entry options = new CompletionSuggestion.Entry(new Text("suggest"), 0, 10); - int docId = randomIntBetween(0, Integer.MAX_VALUE); - CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( - docId, - new Text("suggestion"), - 1F, - Collections.emptyMap() - ); - SearchHit searchHit = new SearchHit(docId); - searchHit.shard( - new SearchShardTarget( - "node", - new ShardId("index", "uuid", randomIntBetween(0, Integer.MAX_VALUE)), - randomBoolean() ? 
RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10) - ) - ); - option.setHit(searchHit); - options.addOption(option); - completionSuggestion.addTerm(options); - suggestions.add(completionSuggestion); - Suggest suggest = new Suggest(suggestions); - SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - 1, - 1, + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( 0, - randomLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - addResponse(searchResponseMerger, searchResponse); - } - awaitResponsesAdded(); - assertEquals(numResponses, searchResponseMerger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(0, mergedResponse.getFailedShards()); - assertEquals(0, mergedResponse.getShardFailures().length); - CompletionSuggestion suggestion = mergedResponse.getSuggest().getSuggestion(suggestionName); - assertEquals(1, suggestion.getEntries().size()); - CompletionSuggestion.Entry options = suggestion.getEntries().get(0); - assertEquals(Math.min(numResponses, size), options.getOptions().size()); - int lastShardId = 0; - String lastClusterAlias = null; - for (CompletionSuggestion.Entry.Option option : options) { - assertEquals("suggestion", option.getText().string()); - SearchShardTarget shard = option.getHit().getShard(); - int currentShardId = shard.getShardId().id(); - 
assertThat(currentShardId, greaterThanOrEqualTo(lastShardId)); - if (currentShardId == lastShardId) { - assertThat(shard.getClusterAlias(), greaterThan(lastClusterAlias)); - } else { - lastShardId = currentShardId; + 0, + 0, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder() + ) + ) { + for (int i = 0; i < numResponses; i++) { + List>> suggestions = + new ArrayList<>(); + CompletionSuggestion completionSuggestion = new CompletionSuggestion(suggestionName, size, false); + CompletionSuggestion.Entry options = new CompletionSuggestion.Entry(new Text("suggest"), 0, 10); + int docId = randomIntBetween(0, Integer.MAX_VALUE); + CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option( + docId, + new Text("suggestion"), + 1F, + Collections.emptyMap() + ); + SearchHit searchHit = new SearchHit(docId); + searchHit.shard( + new SearchShardTarget( + "node", + new ShardId("index", "uuid", randomIntBetween(0, Integer.MAX_VALUE)), + randomBoolean() ? RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY : randomAlphaOfLengthBetween(5, 10) + ) + ); + option.setHit(searchHit); + options.addOption(option); + completionSuggestion.addTerm(options); + suggestions.add(completionSuggestion); + Suggest suggest = new Suggest(suggestions); + SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + 1, + 1, + 0, + randomLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(searchResponseMerger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, searchResponseMerger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = 
searchResponseMerger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(0, mergedResponse.getFailedShards()); + assertEquals(0, mergedResponse.getShardFailures().length); + CompletionSuggestion suggestion = mergedResponse.getSuggest().getSuggestion(suggestionName); + assertEquals(1, suggestion.getEntries().size()); + CompletionSuggestion.Entry options = suggestion.getEntries().get(0); + assertEquals(Math.min(numResponses, size), options.getOptions().size()); + int lastShardId = 0; + String lastClusterAlias = null; + for (CompletionSuggestion.Entry.Option option : options) { + assertEquals("suggestion", option.getText().string()); + SearchShardTarget shard = option.getHit().getShard(); + int currentShardId = shard.getShardId().id(); + assertThat(currentShardId, greaterThanOrEqualTo(lastShardId)); + if (currentShardId == lastShardId) { + assertThat(shard.getClusterAlias(), greaterThan(lastClusterAlias)); + } else { + lastShardId = currentShardId; + } + lastClusterAlias = shard.getClusterAlias(); + } + } finally { + mergedResponse.decRef(); } - lastClusterAlias = shard.getClusterAlias(); } } @@ -476,101 +555,123 @@ public void testMergeEmptyFormat() throws InterruptedException { ); SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - 0, - 0, - 0, - new SearchTimeProvider(0, 0, () -> 0), - emptyReduceContextBuilder(new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("field1"))) - ); - for (Max max : Arrays.asList(max1, max2)) { - InternalAggregations aggs = InternalAggregations.from(Arrays.asList(max)); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, 
null, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - 1, - 1, + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( 0, - randomLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - searchResponseMerger.add(searchResponse); + 0, + 0, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder(new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder("field1"))) + ) + ) { + for (Max max : Arrays.asList(max1, max2)) { + InternalAggregations aggs = InternalAggregations.from(Arrays.asList(max)); + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + 1, + 1, + 0, + randomLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + searchResponseMerger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + } + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters); + try { + Max mergedMax = searchResponse.getAggregations().get("field1"); + assertEquals(mergedMax.getValueAsString(), "2021-05-01T00:00:00.000Z"); + } finally { + searchResponse.decRef(); + } } - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters); - Max mergedMax = searchResponse.getAggregations().get("field1"); - assertEquals(mergedMax.getValueAsString(), "2021-05-01T00:00:00.000Z"); } public void testMergeAggs() throws InterruptedException { String maxAggName = randomAlphaOfLengthBetween(5, 8); String rangeAggName = randomAlphaOfLengthBetween(5, 8); - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - 0, - 0, - 0, - new SearchTimeProvider(0, 0, () -> 0), - 
emptyReduceContextBuilder( - new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) - .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) - ) - ); - int totalCount = 0; - double maxValue = Double.MIN_VALUE; - for (int i = 0; i < numResponses; i++) { - double value = randomDouble(); - maxValue = Math.max(value, maxValue); - Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); - InternalDateRange.Factory factory = new InternalDateRange.Factory(); - int count = randomIntBetween(1, 1000); - totalCount += count; - InternalDateRange.Bucket bucket = factory.createBucket( - "bucket", - 0D, - 10000D, - count, - InternalAggregations.EMPTY, - false, - DocValueFormat.RAW - ); - InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); - InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); - SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - 1, - 1, + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( 0, - randomLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - addResponse(searchResponseMerger, searchResponse); + 0, + 0, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder( + new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) + .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) + ) + ) + ) { + int totalCount = 0; + double maxValue = Double.MIN_VALUE; + for (int i = 0; i < numResponses; i++) { + double value = randomDouble(); + maxValue = Math.max(value, maxValue); + Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); + 
InternalDateRange.Factory factory = new InternalDateRange.Factory(); + int count = randomIntBetween(1, 1000); + totalCount += count; + InternalDateRange.Bucket bucket = factory.createBucket( + "bucket", + 0D, + 10000D, + count, + InternalAggregations.EMPTY, + false, + DocValueFormat.RAW + ); + InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); + InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); + SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN); + InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + 1, + 1, + 0, + randomLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + addResponse(searchResponseMerger, searchResponse); + } finally { + searchResponse.decRef(); + } + } + awaitResponsesAdded(); + assertEquals(numResponses, searchResponseMerger.numResponses()); + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + assertSame(clusters, mergedResponse.getClusters()); + assertEquals(numResponses, mergedResponse.getTotalShards()); + assertEquals(numResponses, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertEquals(0, mergedResponse.getFailedShards()); + assertEquals(0, mergedResponse.getShardFailures().length); + assertEquals(0, mergedResponse.getHits().getHits().length); + assertEquals(2, mergedResponse.getAggregations().asList().size()); + Max max = mergedResponse.getAggregations().get(maxAggName); + assertEquals(maxValue, max.value(), 0d); + Range range = mergedResponse.getAggregations().get(rangeAggName); + assertEquals(1, range.getBuckets().size()); + Range.Bucket bucket = 
range.getBuckets().get(0); + assertEquals("0.0", bucket.getFromAsString()); + assertEquals("10000.0", bucket.getToAsString()); + assertEquals(totalCount, bucket.getDocCount()); + } finally { + mergedResponse.decRef(); + } } - awaitResponsesAdded(); - assertEquals(numResponses, searchResponseMerger.numResponses()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); - assertSame(clusters, mergedResponse.getClusters()); - assertEquals(numResponses, mergedResponse.getTotalShards()); - assertEquals(numResponses, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertEquals(0, mergedResponse.getFailedShards()); - assertEquals(0, mergedResponse.getShardFailures().length); - assertEquals(0, mergedResponse.getHits().getHits().length); - assertEquals(2, mergedResponse.getAggregations().asList().size()); - Max max = mergedResponse.getAggregations().get(maxAggName); - assertEquals(maxValue, max.value(), 0d); - Range range = mergedResponse.getAggregations().get(rangeAggName); - assertEquals(1, range.getBuckets().size()); - Range.Bucket bucket = range.getBuckets().get(0); - assertEquals("0.0", bucket.getFromAsString()); - assertEquals("10000.0", bucket.getToAsString()); - assertEquals(totalCount, bucket.getDocCount()); } public void testMergeSearchHits() throws InterruptedException { @@ -606,253 +707,294 @@ public void testMergeSearchHits() throws InterruptedException { TotalHits.Relation totalHitsRelation = randomTrackTotalHits.v2(); PriorityQueue priorityQueue = new PriorityQueue<>(new SearchHitComparator(sortFields)); - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - from, - size, - trackTotalHitsUpTo, - timeProvider, - emptyReduceContextBuilder() - ); - - TotalHits expectedTotalHits = null; - int expectedTotal = 0; - int expectedSuccessful = 0; - int expectedSkipped = 0; - int expectedReducePhases = 
1; - boolean expectedTimedOut = false; - Boolean expectedTerminatedEarly = null; - float expectedMaxScore = Float.NEGATIVE_INFINITY; - int numIndices = requestedSize == 0 ? 0 : randomIntBetween(1, requestedSize); - Iterator> indicesIterator = randomRealisticIndices(numIndices, numResponses).entrySet().iterator(); - boolean hasHits = false; - for (int i = 0; i < numResponses; i++) { - Map.Entry entry = indicesIterator.next(); - String clusterAlias = entry.getKey(); - Index[] indices = entry.getValue(); - int total = randomIntBetween(1, 1000); - expectedTotal += total; - int successful = randomIntBetween(1, total); - expectedSuccessful += successful; - int skipped = randomIntBetween(1, total); - expectedSkipped += skipped; - - TotalHits totalHits = null; - if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); - } + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( + from, + size, + trackTotalHitsUpTo, + timeProvider, + emptyReduceContextBuilder() + ) + ) { - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; - int scoreFactor = randomIntBetween(1, numResponses); - float maxScore = scoreSort ? 
numDocs * scoreFactor : Float.NaN; - SearchHit[] hits = randomSearchHitArray( - numDocs, - numResponses, - clusterAlias, - indices, - maxScore, - scoreFactor, - sortFields, - priorityQueue - ); - hasHits |= hits.length > 0; - expectedMaxScore = Math.max(expectedMaxScore, maxScore); + TotalHits expectedTotalHits = null; + int expectedTotal = 0; + int expectedSuccessful = 0; + int expectedSkipped = 0; + int expectedReducePhases = 1; + boolean expectedTimedOut = false; + Boolean expectedTerminatedEarly = null; + float expectedMaxScore = Float.NEGATIVE_INFINITY; + int numIndices = requestedSize == 0 ? 0 : randomIntBetween(1, requestedSize); + Iterator> indicesIterator = randomRealisticIndices(numIndices, numResponses).entrySet().iterator(); + boolean hasHits = false; + for (int i = 0; i < numResponses; i++) { + Map.Entry entry = indicesIterator.next(); + String clusterAlias = entry.getKey(); + Index[] indices = entry.getValue(); + int total = randomIntBetween(1, 1000); + expectedTotal += total; + int successful = randomIntBetween(1, total); + expectedSuccessful += successful; + int skipped = randomIntBetween(1, total); + expectedSkipped += skipped; - Object[] collapseValues = null; - if (collapseField != null) { - collapseValues = new Object[numDocs]; - for (int j = 0; j < numDocs; j++) { - // set different collapse values for each cluster for simplicity - collapseValues[j] = j + 1000 * i; + TotalHits totalHits = null; + if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { + totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); } - } - SearchHits searchHits = new SearchHits( - hits, - totalHits, - maxScore == Float.NEGATIVE_INFINITY ? 
Float.NaN : maxScore, - sortFields, - collapseField, - collapseValues - ); + final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + int scoreFactor = randomIntBetween(1, numResponses); + float maxScore = scoreSort ? numDocs * scoreFactor : Float.NaN; + SearchHit[] hits = randomSearchHitArray( + numDocs, + numResponses, + clusterAlias, + indices, + maxScore, + scoreFactor, + sortFields, + priorityQueue + ); + hasHits |= hits.length > 0; + expectedMaxScore = Math.max(expectedMaxScore, maxScore); - int numReducePhases = randomIntBetween(1, 5); - expectedReducePhases += numReducePhases; - boolean timedOut = rarely(); - expectedTimedOut = expectedTimedOut || timedOut; - Boolean terminatedEarly = frequently() ? null : true; - expectedTerminatedEarly = expectedTerminatedEarly == null ? terminatedEarly : expectedTerminatedEarly; + Object[] collapseValues = null; + if (collapseField != null) { + collapseValues = new Object[numDocs]; + for (int j = 0; j < numDocs; j++) { + // set different collapse values for each cluster for simplicity + collapseValues[j] = j + 1000 * i; + } + } - InternalSearchResponse internalSearchResponse = new InternalSearchResponse( - searchHits, - null, - null, - null, - timedOut, - terminatedEarly, - numReducePhases - ); + SearchHits searchHits = new SearchHits( + hits, + totalHits, + maxScore == Float.NEGATIVE_INFINITY ? Float.NaN : maxScore, + sortFields, + collapseField, + collapseValues + ); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, - null, - total, - successful, - skipped, - randomLong(), - ShardSearchFailure.EMPTY_ARRAY, - SearchResponseTests.randomClusters() - ); + int numReducePhases = randomIntBetween(1, 5); + expectedReducePhases += numReducePhases; + boolean timedOut = rarely(); + expectedTimedOut = expectedTimedOut || timedOut; + Boolean terminatedEarly = frequently() ? 
null : true; + expectedTerminatedEarly = expectedTerminatedEarly == null ? terminatedEarly : expectedTerminatedEarly; - addResponse(searchResponseMerger, searchResponse); - } + InternalSearchResponse internalSearchResponse = new InternalSearchResponse( + searchHits, + null, + null, + null, + timedOut, + terminatedEarly, + numReducePhases + ); - awaitResponsesAdded(); - assertEquals(numResponses, searchResponseMerger.numResponses()); - final SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters); + SearchResponse searchResponse = new SearchResponse( + internalSearchResponse, + null, + total, + successful, + skipped, + randomLong(), + ShardSearchFailure.EMPTY_ARRAY, + SearchResponseTests.randomClusters() + ); + try { + addResponse(searchResponseMerger, searchResponse); + } finally { + searchResponse.decRef(); + } + } - assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); - assertEquals(expectedTotal, searchResponse.getTotalShards()); - assertEquals(expectedSuccessful, searchResponse.getSuccessfulShards()); - assertEquals(expectedSkipped, searchResponse.getSkippedShards()); - assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(expectedReducePhases, searchResponse.getNumReducePhases()); - assertEquals(expectedTimedOut, searchResponse.isTimedOut()); - assertEquals(expectedTerminatedEarly, searchResponse.isTerminatedEarly()); + awaitResponsesAdded(); + assertEquals(numResponses, searchResponseMerger.numResponses()); + final SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters); + try { + assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis()); + assertEquals(expectedTotal, searchResponse.getTotalShards()); + assertEquals(expectedSuccessful, 
searchResponse.getSuccessfulShards()); + assertEquals(expectedSkipped, searchResponse.getSkippedShards()); + assertEquals(0, searchResponse.getShardFailures().length); + assertEquals(expectedReducePhases, searchResponse.getNumReducePhases()); + assertEquals(expectedTimedOut, searchResponse.isTimedOut()); + assertEquals(expectedTerminatedEarly, searchResponse.isTerminatedEarly()); - assertSame(clusters, searchResponse.getClusters()); - assertNull(searchResponse.getScrollId()); + assertSame(clusters, searchResponse.getClusters()); + assertNull(searchResponse.getScrollId()); - SearchHits searchHits = searchResponse.getHits(); - // the sort fields and the collapse field are not returned when hits are empty - if (hasHits) { - assertArrayEquals(sortFields, searchHits.getSortFields()); - assertEquals(collapseField, searchHits.getCollapseField()); - } else { - assertNull(searchHits.getSortFields()); - assertNull(searchHits.getCollapseField()); - } - if (expectedTotalHits == null) { - assertNull(searchHits.getTotalHits()); - } else { - assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); - } - if (expectedMaxScore == Float.NEGATIVE_INFINITY) { - assertTrue(Float.isNaN(searchHits.getMaxScore())); - } else { - assertEquals(expectedMaxScore, searchHits.getMaxScore(), 0f); - } + SearchHits searchHits = searchResponse.getHits(); + // the sort fields and the collapse field are not returned when hits are empty + if (hasHits) { + assertArrayEquals(sortFields, searchHits.getSortFields()); + assertEquals(collapseField, searchHits.getCollapseField()); + } else { + assertNull(searchHits.getSortFields()); + assertNull(searchHits.getCollapseField()); + } + if (expectedTotalHits == null) { + assertNull(searchHits.getTotalHits()); + } else { + assertNotNull(searchHits.getTotalHits()); + assertEquals(expectedTotalHits.value, 
searchHits.getTotalHits().value); + assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + } + if (expectedMaxScore == Float.NEGATIVE_INFINITY) { + assertTrue(Float.isNaN(searchHits.getMaxScore())); + } else { + assertEquals(expectedMaxScore, searchHits.getMaxScore(), 0f); + } - for (int i = 0; i < from; i++) { - priorityQueue.poll(); - } - SearchHit[] hits = searchHits.getHits(); - if (collapseField != null - // the collapse field is not returned when hits are empty - && hasHits) { - assertEquals(hits.length, searchHits.getCollapseValues().length); - } else { - assertNull(searchHits.getCollapseValues()); - } - assertThat(hits.length, lessThanOrEqualTo(size)); - for (SearchHit hit : hits) { - SearchHit expected = priorityQueue.poll(); - assertSame(expected, hit); + for (int i = 0; i < from; i++) { + priorityQueue.poll(); + } + SearchHit[] hits = searchHits.getHits(); + if (collapseField != null + // the collapse field is not returned when hits are empty + && hasHits) { + assertEquals(hits.length, searchHits.getCollapseValues().length); + } else { + assertNull(searchHits.getCollapseValues()); + } + assertThat(hits.length, lessThanOrEqualTo(size)); + for (SearchHit hit : hits) { + SearchHit expected = priorityQueue.poll(); + assertSame(expected, hit); + } + } finally { + searchResponse.decRef(); + } } } public void testMergeNoResponsesAdded() { long currentRelativeTime = randomNonNegativeLong(); final SearchTimeProvider timeProvider = new SearchTimeProvider(randomLong(), 0, () -> currentRelativeTime); - SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, emptyReduceContextBuilder()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - assertEquals(0, merger.numResponses()); - SearchResponse response = merger.getMergedResponse(clusters); - assertSame(clusters, response.getClusters()); - assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), 
response.getTook().millis()); - assertEquals(0, response.getTotalShards()); - assertEquals(0, response.getSuccessfulShards()); - assertEquals(0, response.getSkippedShards()); - assertEquals(0, response.getFailedShards()); - assertEquals(0, response.getNumReducePhases()); - assertFalse(response.isTimedOut()); - assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); - assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); - assertNull(response.getScrollId()); - assertSame(InternalAggregations.EMPTY, response.getAggregations()); - assertNull(response.getSuggest()); - assertEquals(0, response.getProfileResults().size()); - assertNull(response.isTerminatedEarly()); - assertEquals(0, response.getShardFailures().length); + try (SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, emptyReduceContextBuilder())) { + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + assertEquals(0, merger.numResponses()); + SearchResponse response = merger.getMergedResponse(clusters); + try { + assertSame(clusters, response.getClusters()); + assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), response.getTook().millis()); + assertEquals(0, response.getTotalShards()); + assertEquals(0, response.getSuccessfulShards()); + assertEquals(0, response.getSkippedShards()); + assertEquals(0, response.getFailedShards()); + assertEquals(0, response.getNumReducePhases()); + assertFalse(response.isTimedOut()); + assertNotNull(response.getHits().getTotalHits()); + assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getHits().length); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertNull(response.getScrollId()); + assertSame(InternalAggregations.EMPTY, response.getAggregations()); + 
assertNull(response.getSuggest()); + assertEquals(0, response.getProfileResults().size()); + assertNull(response.isTerminatedEarly()); + assertEquals(0, response.getShardFailures().length); + } finally { + response.decRef(); + } + } } public void testMergeEmptySearchHitsWithNonEmpty() { long currentRelativeTime = randomLong(); final SearchTimeProvider timeProvider = new SearchTimeProvider(randomLong(), 0, () -> currentRelativeTime); - SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, emptyReduceContextBuilder()); - SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); - int numFields = randomIntBetween(1, 3); - SortField[] sortFields = new SortField[numFields]; - for (int i = 0; i < numFields; i++) { - sortFields[i] = new SortField("field-" + i, SortField.Type.INT, randomBoolean()); - } - PriorityQueue priorityQueue = new PriorityQueue<>(new SearchHitComparator(sortFields)); - SearchHit[] hits = randomSearchHitArray( - 10, - 1, - "remote", - new Index[] { new Index("index", "uuid") }, - Float.NaN, - 1, - sortFields, - priorityQueue - ); - { - SearchHits searchHits = new SearchHits(hits, new TotalHits(10, TotalHits.Relation.EQUAL_TO), Float.NaN, sortFields, null, null); - InternalSearchResponse response = new InternalSearchResponse(searchHits, null, null, null, false, false, 1); - SearchResponse searchResponse = new SearchResponse( - response, - null, - 1, - 1, - 0, - 1L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - merger.add(searchResponse); - } - { - SearchHits empty = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN, null, null, null); - InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1); - SearchResponse searchResponse = new SearchResponse( - response, - null, + try (SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, 
emptyReduceContextBuilder())) { + SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); + int numFields = randomIntBetween(1, 3); + SortField[] sortFields = new SortField[numFields]; + for (int i = 0; i < numFields; i++) { + sortFields[i] = new SortField("field-" + i, SortField.Type.INT, randomBoolean()); + } + PriorityQueue priorityQueue = new PriorityQueue<>(new SearchHitComparator(sortFields)); + SearchHit[] hits = randomSearchHitArray( + 10, 1, + "remote", + new Index[] { new Index("index", "uuid") }, + Float.NaN, 1, - 0, - 1L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + sortFields, + priorityQueue ); - merger.add(searchResponse); + { + SearchHits searchHits = new SearchHits( + hits, + new TotalHits(10, TotalHits.Relation.EQUAL_TO), + Float.NaN, + sortFields, + null, + null + ); + InternalSearchResponse response = new InternalSearchResponse(searchHits, null, null, null, false, false, 1); + SearchResponse searchResponse = new SearchResponse( + response, + null, + 1, + 1, + 0, + 1L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + merger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + } + { + SearchHits empty = new SearchHits( + new SearchHit[0], + new TotalHits(0, TotalHits.Relation.EQUAL_TO), + Float.NaN, + null, + null, + null + ); + InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1); + SearchResponse searchResponse = new SearchResponse( + response, + null, + 1, + 1, + 0, + 1L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + merger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + } + assertEquals(2, merger.numResponses()); + SearchResponse mergedResponse = merger.getMergedResponse(clusters); + try { + assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getHits().length); + assertEquals(2, 
mergedResponse.getTotalShards()); + assertEquals(2, mergedResponse.getSuccessfulShards()); + assertEquals(0, mergedResponse.getSkippedShards()); + assertArrayEquals(sortFields, mergedResponse.getHits().getSortFields()); + assertArrayEquals(hits, mergedResponse.getHits().getHits()); + assertEquals(clusters, mergedResponse.getClusters()); + } finally { + mergedResponse.decRef(); + } } - assertEquals(2, merger.numResponses()); - SearchResponse mergedResponse = merger.getMergedResponse(clusters); - assertEquals(10, mergedResponse.getHits().getTotalHits().value); - assertEquals(10, mergedResponse.getHits().getHits().length); - assertEquals(2, mergedResponse.getTotalShards()); - assertEquals(2, mergedResponse.getSuccessfulShards()); - assertEquals(0, mergedResponse.getSkippedShards()); - assertArrayEquals(sortFields, mergedResponse.getHits().getSortFields()); - assertArrayEquals(hits, mergedResponse.getHits().getHits()); - assertEquals(clusters, mergedResponse.getClusters()); } public void testMergeOnlyEmptyHits() { @@ -862,32 +1004,41 @@ public void testMergeOnlyEmptyHits() { Tuple randomTrackTotalHits = randomTrackTotalHits(); int trackTotalHitsUpTo = randomTrackTotalHits.v1(); TotalHits.Relation totalHitsRelation = randomTrackTotalHits.v2(); - SearchResponseMerger merger = new SearchResponseMerger(0, 10, trackTotalHitsUpTo, timeProvider, emptyReduceContextBuilder()); - int numResponses = randomIntBetween(1, 5); - TotalHits expectedTotalHits = null; - for (int i = 0; i < numResponses; i++) { - TotalHits totalHits = null; - if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 
0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + try (SearchResponseMerger merger = new SearchResponseMerger(0, 10, trackTotalHitsUpTo, timeProvider, emptyReduceContextBuilder())) { + int numResponses = randomIntBetween(1, 5); + TotalHits expectedTotalHits = null; + for (int i = 0; i < numResponses; i++) { + TotalHits totalHits = null; + if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { + totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + } + SearchHits empty = new SearchHits(new SearchHit[0], totalHits, Float.NaN, null, null, null); + InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1); + SearchResponse searchResponse = new SearchResponse( + response, + null, + 1, + 1, + 0, + 1L, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + try { + merger.add(searchResponse); + } finally { + searchResponse.decRef(); + } + } + SearchResponse mergedResponse = merger.getMergedResponse(clusters); + try { + assertEquals(expectedTotalHits, mergedResponse.getHits().getTotalHits()); + } finally { + mergedResponse.decRef(); } - SearchHits empty = new SearchHits(new SearchHit[0], totalHits, Float.NaN, null, null, null); - InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1); - SearchResponse searchResponse = new SearchResponse( - response, - null, - 1, - 1, - 0, - 1L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); - merger.add(searchResponse); } - SearchResponse mergedResponse = merger.getMergedResponse(clusters); - assertEquals(expectedTotalHits, mergedResponse.getHits().getTotalHits()); } private 
static Tuple randomTrackTotalHits() { diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index 7090d590a4901..6230a24a0768f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -1094,23 +1094,33 @@ public void testCreateSearchResponseMerger() { assertEquals(-1, source.size()); assertEquals(-1, source.from()); assertNull(source.trackTotalHitsUpTo()); - SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger( - source, - timeProvider, - emptyReduceContextBuilder() - ); - assertEquals(0, merger.from); - assertEquals(10, merger.size); - assertEquals(SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO, merger.trackTotalHitsUpTo); - assertEquals(0, source.from()); - assertEquals(10, source.size()); - assertNull(source.trackTotalHitsUpTo()); + try ( + SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger( + source, + timeProvider, + emptyReduceContextBuilder() + ) + ) { + assertEquals(0, merger.from); + assertEquals(10, merger.size); + assertEquals(SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO, merger.trackTotalHitsUpTo); + assertEquals(0, source.from()); + assertEquals(10, source.size()); + assertNull(source.trackTotalHitsUpTo()); + } } { - SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger(null, timeProvider, emptyReduceContextBuilder()); - assertEquals(0, merger.from); - assertEquals(10, merger.size); - assertEquals(SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO, merger.trackTotalHitsUpTo); + try ( + SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger( + null, + timeProvider, + emptyReduceContextBuilder() + ) + ) { + assertEquals(0, merger.from); + assertEquals(10, merger.size); + 
assertEquals(SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO, merger.trackTotalHitsUpTo); + } } { SearchSourceBuilder source = new SearchSourceBuilder(); @@ -1120,17 +1130,20 @@ public void testCreateSearchResponseMerger() { source.size(originalSize); int trackTotalHitsUpTo = randomIntBetween(0, Integer.MAX_VALUE); source.trackTotalHitsUpTo(trackTotalHitsUpTo); - SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger( - source, - timeProvider, - emptyReduceContextBuilder() - ); - assertEquals(0, source.from()); - assertEquals(originalFrom + originalSize, source.size()); - assertEquals(trackTotalHitsUpTo, (int) source.trackTotalHitsUpTo()); - assertEquals(originalFrom, merger.from); - assertEquals(originalSize, merger.size); - assertEquals(trackTotalHitsUpTo, merger.trackTotalHitsUpTo); + try ( + SearchResponseMerger merger = TransportSearchAction.createSearchResponseMerger( + source, + timeProvider, + emptyReduceContextBuilder() + ) + ) { + assertEquals(0, source.from()); + assertEquals(originalFrom + originalSize, source.size()); + assertEquals(trackTotalHitsUpTo, (int) source.trackTotalHitsUpTo()); + assertEquals(originalFrom, merger.from); + assertEquals(originalSize, merger.size); + assertEquals(trackTotalHitsUpTo, merger.trackTotalHitsUpTo); + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index cef8d555b111d..13131a5e3eef7 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -354,7 +354,8 @@ public void testWithInQueryLine() throws Exception { try { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.WITHIN)) - .get(); + .get() + .decRef(); } catch 
(SearchPhaseExecutionException e) { assertThat(e.getCause().getMessage(), containsString("Field [" + defaultFieldName + "] found an unsupported shape Line")); } From 316603548d0d20dcf8a62d62b6c92d708508b4b1 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 6 Dec 2023 16:22:06 +0100 Subject: [PATCH 253/263] [Connectors API] Add sync job status check to cancel connector sync job integration test. (#103057) Extend cancel connector sync integration test --- .../test/entsearch/430_connector_sync_job_cancel.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml index e9c612cbf9f27..d934b7c674f25 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml @@ -20,13 +20,21 @@ setup: id: test-connector job_type: full trigger_method: on_demand + - set: { id: sync-job-id-to-cancel } + - do: connector_sync_job.cancel: connector_sync_job_id: $sync-job-id-to-cancel - match: { acknowledged: true } + - do: + connector_sync_job.get: + connector_sync_job_id: $sync-job-id-to-cancel + + - match: { status: "canceling"} + --- "Cancel a Connector Sync Job - Connector Sync Job does not exist": From ed2155cc47366982ab22e0a276ef055de0b4279a Mon Sep 17 00:00:00 2001 From: sabi0 <2sabio@gmail.com> Date: Wed, 6 Dec 2023 16:27:16 +0100 Subject: [PATCH 254/263] Fix args length == 1 case handling in ESLoggerUsageChecker (#102382) * Fix args length == 1 case handling in ESLoggerUsageChecker There was an operator precedence mistake in: (lengthWithoutMarker == 1 || lengthWithoutMarker == 2) && lengthWithoutMarker == 2 ? ... 
Logical AND && has higher precedence than ternary operator ?:, So the above expression is equivalent to lengthWithoutMarker == 2 ? ... --------- Co-authored-by: Elastic Machine --- .../elasticsearch/test/loggerusage/ESLoggerUsageChecker.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java index bd51c74ee8e47..d7cde5676a27f 100644 --- a/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java +++ b/test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java @@ -364,9 +364,8 @@ private void verifyLoggerUsage( && argumentTypes[markerOffset + 1].equals(OBJECT_CLASS)) { // MULTI-PARAM METHOD: debug(Marker?, String, Object p0, ...) checkFixedArityArgs(methodNode, logMessageFrames[i], lineNumber, methodInsn, markerOffset + 0, lengthWithoutMarker - 1); - } else if ((lengthWithoutMarker == 1 || lengthWithoutMarker == 2) && lengthWithoutMarker == 2 - ? argumentTypes[markerOffset + 1].equals(THROWABLE_CLASS) - : true) { + } else if (lengthWithoutMarker == 1 + || (lengthWithoutMarker == 2 && argumentTypes[markerOffset + 1].equals(THROWABLE_CLASS))) { // all the rest: debug(Marker?, (Message|MessageSupplier|CharSequence|Object|String|Supplier), Throwable?) checkFixedArityArgs(methodNode, logMessageFrames[i], lineNumber, methodInsn, markerOffset + 0, 0); } else { From 1617a8db3643573671469b343e9c6dfb787026b2 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 6 Dec 2023 17:29:39 +0200 Subject: [PATCH 255/263] [ILM] More resilient when a policy is added to searchable snapshot (#102741) In this PR we enable ILM to handle the following scenarios: - An ILM policy with the a searchable snapshot action in hot or cold is added on a partially mounted searchable snapshot. 
- An ILM policy with the a searchable snapshot action in frozen is added on a fully mounted searchable snapshot. The searchable snapshot could have had a previous ILM policy that has been removed via POST /_ilm/remove or it might not have been managed at all. --- docs/changelog/102741.yaml | 6 + .../reference/ilm/actions/ilm-delete.asciidoc | 5 + .../xpack/core/ilm/MountSnapshotStep.java | 50 ++-- .../core/ilm/SearchableSnapshotAction.java | 64 +++-- .../actions/SearchableSnapshotActionIT.java | 218 +++++++++++++++++- 5 files changed, 304 insertions(+), 39 deletions(-) create mode 100644 docs/changelog/102741.yaml diff --git a/docs/changelog/102741.yaml b/docs/changelog/102741.yaml new file mode 100644 index 0000000000000..84a4b8092632f --- /dev/null +++ b/docs/changelog/102741.yaml @@ -0,0 +1,6 @@ +pr: 102741 +summary: "[ILM] More resilient when a policy is added to searchable snapshot" +area: ILM+SLM +type: bug +issues: + - 101958 diff --git a/docs/reference/ilm/actions/ilm-delete.asciidoc b/docs/reference/ilm/actions/ilm-delete.asciidoc index fbd7f1b0a238a..eac3b9804709a 100644 --- a/docs/reference/ilm/actions/ilm-delete.asciidoc +++ b/docs/reference/ilm/actions/ilm-delete.asciidoc @@ -16,6 +16,11 @@ Defaults to `true`. This option is applicable when the <> action is used in any previous phase. +WARNING: If a policy with a searchable snapshot action is applied on an existing searchable snapshot index, +the snapshot backing this index will NOT be deleted because it was not created by this policy. If you want +to clean this snapshot, please delete it manually after the index is deleted using the <>, you +can find the repository and snapshot name using the <>. 
+ [[ilm-delete-action-ex]] ==== Example diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java index 057f0c8930e66..96f280b4e03c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/MountSnapshotStep.java @@ -68,24 +68,32 @@ void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState currentCl String indexName = indexMetadata.getIndex().getName(); LifecycleExecutionState lifecycleState = indexMetadata.getLifecycleExecutionState(); + SearchableSnapshotAction.SearchableSnapshotMetadata searchableSnapshotMetadata = SearchableSnapshotAction + .extractSearchableSnapshotFromSettings(indexMetadata); String policyName = indexMetadata.getLifecyclePolicyName(); - final String snapshotRepository = lifecycleState.snapshotRepository(); + String snapshotRepository = lifecycleState.snapshotRepository(); if (Strings.hasText(snapshotRepository) == false) { - listener.onFailure( - new IllegalStateException( - "snapshot repository is not present for policy [" + policyName + "] and index [" + indexName + "]" - ) - ); - return; + if (searchableSnapshotMetadata == null) { + listener.onFailure( + new IllegalStateException( + "snapshot repository is not present for policy [" + policyName + "] and index [" + indexName + "]" + ) + ); + return; + } else { + snapshotRepository = searchableSnapshotMetadata.repositoryName(); + } } - final String snapshotName = lifecycleState.snapshotName(); - if (Strings.hasText(snapshotName) == false) { + String snapshotName = lifecycleState.snapshotName(); + if (Strings.hasText(snapshotName) == false && searchableSnapshotMetadata == null) { listener.onFailure( new IllegalStateException("snapshot name was not generated for policy [" + policyName + "] and index [" + indexName + "]") ); return; + 
} else if (searchableSnapshotMetadata != null) { + snapshotName = searchableSnapshotMetadata.snapshotName(); } String mountedIndexName = restoredIndexPrefix + indexName; @@ -102,16 +110,20 @@ void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState currentCl final String snapshotIndexName = lifecycleState.snapshotIndexName(); if (snapshotIndexName == null) { - // This index had its searchable snapshot created prior to a version where we captured - // the original index name, so make our best guess at the name - indexName = bestEffortIndexNameResolution(indexName); - logger.debug( - "index [{}] using policy [{}] does not have a stored snapshot index name, " - + "using our best effort guess of [{}] for the original snapshotted index name", - indexMetadata.getIndex().getName(), - policyName, - indexName - ); + if (searchableSnapshotMetadata == null) { + // This index had its searchable snapshot created prior to a version where we captured + // the original index name, so make our best guess at the name + indexName = bestEffortIndexNameResolution(indexName); + logger.debug( + "index [{}] using policy [{}] does not have a stored snapshot index name, " + + "using our best effort guess of [{}] for the original snapshotted index name", + indexMetadata.getIndex().getName(), + policyName, + indexName + ); + } else { + indexName = searchableSnapshotMetadata.sourceIndex(); + } } else { // Use the name of the snapshot as specified in the metadata, because the current index // name not might not reflect the name of the index actually in the snapshot diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java index 9ae0024c5a573..5b9b559b4d957 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SearchableSnapshotAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -32,6 +33,7 @@ import java.util.Objects; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_REPOSITORY_NAME_SETTING_KEY; +import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOTS_SNAPSHOT_NAME_SETTING_KEY; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY; import static org.elasticsearch.xpack.core.searchablesnapshots.SearchableSnapshotsConstants.SEARCHABLE_SNAPSHOT_FEATURE; @@ -141,10 +143,12 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac IndexMetadata indexMetadata = clusterState.getMetadata().index(index); assert indexMetadata != null : "index " + index.getName() + " must exist in the cluster state"; String policyName = indexMetadata.getLifecyclePolicyName(); - if (indexMetadata.getSettings().get(LifecycleSettings.SNAPSHOT_INDEX_NAME) != null) { + SearchableSnapshotMetadata searchableSnapshotMetadata = extractSearchableSnapshotFromSettings(indexMetadata); + if (searchableSnapshotMetadata != null) { + // TODO: allow this behavior instead of returning false, in this case the index is already a searchable a snapshot + // so the most graceful way of recovery might be to use this repo // The index is already a searchable snapshot, let's see if the repository matches - String repo = indexMetadata.getSettings().get(SEARCHABLE_SNAPSHOTS_REPOSITORY_NAME_SETTING_KEY); - if (this.snapshotRepository.equals(repo) == false) { + if 
(this.snapshotRepository.equals(searchableSnapshotMetadata.repositoryName) == false) { // Okay, different repo, we need to go ahead with the searchable snapshot logger.debug( "[{}] action is configured for index [{}] in policy [{}] which is already mounted as a searchable " @@ -153,15 +157,14 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac SearchableSnapshotAction.NAME, index.getName(), policyName, - repo, + searchableSnapshotMetadata.repositoryName, this.snapshotRepository ); return false; } // Check to the storage type to see if we need to convert between full <-> partial - final boolean partial = indexMetadata.getSettings().getAsBoolean(SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, false); - MountSearchableSnapshotRequest.Storage existingType = partial + MountSearchableSnapshotRequest.Storage existingType = searchableSnapshotMetadata.partial ? MountSearchableSnapshotRequest.Storage.SHARED_CACHE : MountSearchableSnapshotRequest.Storage.FULL_COPY; MountSearchableSnapshotRequest.Storage type = getConcreteStorageType(preActionBranchingKey); @@ -172,7 +175,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac SearchableSnapshotAction.NAME, index.getName(), policyName, - repo, + searchableSnapshotMetadata.repositoryName, type ); return true; @@ -215,7 +218,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac // When generating a snapshot, we either jump to the force merge step, or we skip the // forcemerge and go straight to steps for creating the snapshot StepKey keyForSnapshotGeneration = forceMergeIndex ? 
forceMergeStepKey : generateSnapshotNameKey; - // Branch, deciding whether there is an existing searchable snapshot snapshot that can be used for mounting the index + // Branch, deciding whether there is an existing searchable snapshot that can be used for mounting the index // (in which case, skip generating a new name and the snapshot cleanup), or if we need to generate a new snapshot BranchingStep skipGeneratingSnapshotStep = new BranchingStep( skipGeneratingSnapshotKey, @@ -225,7 +228,8 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac IndexMetadata indexMetadata = clusterState.getMetadata().index(index); String policyName = indexMetadata.getLifecyclePolicyName(); LifecycleExecutionState lifecycleExecutionState = indexMetadata.getLifecycleExecutionState(); - if (lifecycleExecutionState.snapshotName() == null) { + SearchableSnapshotMetadata searchableSnapshotMetadata = extractSearchableSnapshotFromSettings(indexMetadata); + if (lifecycleExecutionState.snapshotName() == null && searchableSnapshotMetadata == null) { // No name exists, so it must be generated logger.trace( "no snapshot name for index [{}] in policy [{}] exists, so one will be generated", @@ -234,8 +238,20 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac ); return false; } + String snapshotIndexName; + String snapshotName; + String repoName; + if (lifecycleExecutionState.snapshotName() != null) { + snapshotIndexName = lifecycleExecutionState.snapshotIndexName(); + snapshotName = lifecycleExecutionState.snapshotName(); + repoName = lifecycleExecutionState.snapshotRepository(); + } else { + snapshotIndexName = searchableSnapshotMetadata.sourceIndex; + snapshotName = searchableSnapshotMetadata.snapshotName; + repoName = searchableSnapshotMetadata.repositoryName; + } - if (this.snapshotRepository.equals(lifecycleExecutionState.snapshotRepository()) == false) { + if (this.snapshotRepository.equals(repoName) == false) { // A different repository is 
being used // TODO: allow this behavior instead of throwing an exception throw new IllegalArgumentException("searchable snapshot indices may be converted only within the same repository"); @@ -244,12 +260,14 @@ public List toSteps(Client client, String phase, StepKey nextStepKey, XPac // We can skip the generate, initial cleanup, and snapshot taking for this index, as we already have a generated snapshot. // This will jump ahead directly to the "mount snapshot" step logger.debug( - "an existing snapshot [{}] in repository [{}] (index name: [{}]) " - + "will be used for mounting [{}] as a searchable snapshot", - lifecycleExecutionState.snapshotName(), - lifecycleExecutionState.snapshotRepository(), - lifecycleExecutionState.snapshotIndexName(), - index.getName() + "Policy [{}] will use an existing snapshot [{}] in repository [{}] (index name: [{}]) " + + "to mount [{}] as a searchable snapshot. This snapshot was found in the {}.", + policyName, + snapshotName, + snapshotRepository, + snapshotIndexName, + index.getName(), + lifecycleExecutionState.snapshotName() != null ? 
"lifecycle execution state" : "metadata of " + index.getName() ); return true; } @@ -411,4 +429,18 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(snapshotRepository, forceMergeIndex); } + + @Nullable + static SearchableSnapshotMetadata extractSearchableSnapshotFromSettings(IndexMetadata indexMetadata) { + String indexName = indexMetadata.getSettings().get(LifecycleSettings.SNAPSHOT_INDEX_NAME); + if (indexName == null) { + return null; + } + String snapshotName = indexMetadata.getSettings().get(SEARCHABLE_SNAPSHOTS_SNAPSHOT_NAME_SETTING_KEY); + String repo = indexMetadata.getSettings().get(SEARCHABLE_SNAPSHOTS_REPOSITORY_NAME_SETTING_KEY); + final boolean partial = indexMetadata.getSettings().getAsBoolean(SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, false); + return new SearchableSnapshotMetadata(indexName, repo, snapshotName, partial); + } + + record SearchableSnapshotMetadata(String sourceIndex, String repositoryName, String snapshotName, boolean partial) {}; } diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index 9ec36d4d9b7cf..361cfd79b5e88 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -58,6 +58,7 @@ import static org.elasticsearch.xpack.TimeSeriesRestDriver.getStepKeyForIndex; import static org.elasticsearch.xpack.TimeSeriesRestDriver.indexDocument; import static org.elasticsearch.xpack.TimeSeriesRestDriver.rolloverMaxOneDocCondition; +import static org.elasticsearch.xpack.core.ilm.DeleteAction.WITH_SNAPSHOT_DELETE; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -184,10 +185,7 @@ public void testDeleteActionDeletesSearchableSnapshot() throws Exception { Map coldActions = Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo)); Map phases = new HashMap<>(); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); - phases.put( - "delete", - new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) - ); + phases.put("delete", new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -574,6 +572,218 @@ public void testConvertingSearchableSnapshotFromFullToPartial() throws Exception ); } + @SuppressWarnings("unchecked") + public void testResumingSearchableSnapshotFromFullToPartial() throws Exception { + String index = "myindex-" + randomAlphaOfLength(4).toLowerCase(Locale.ROOT); + createSnapshotRepo(client(), snapshotRepo, randomBoolean()); + var policyCold = "policy-cold"; + createPolicy( + client(), + policyCold, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + null, + null + ); + var policyFrozen = "policy-cold-frozen"; + createPolicy( + client(), + policyFrozen, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + new Phase( + "frozen", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + null + ); + + createIndex(index, Settings.EMPTY); + ensureGreen(index); + indexDocument(client(), index, true); + + // enable ILM after we indexed a 
document as otherwise ILM might sometimes run so fast the indexDocument call will fail with + // `index_not_found_exception` + updateIndexSettings(index, Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policyCold)); + + final String fullMountedIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + index; + + assertBusy(() -> { + logger.info("--> waiting for [{}] to exist...", fullMountedIndexName); + assertTrue(indexExists(fullMountedIndexName)); + }, 30, TimeUnit.SECONDS); + + assertBusy(() -> { + Step.StepKey stepKeyForIndex = getStepKeyForIndex(client(), fullMountedIndexName); + assertThat(stepKeyForIndex.phase(), is("cold")); + assertThat(stepKeyForIndex.name(), is(PhaseCompleteStep.NAME)); + }, 30, TimeUnit.SECONDS); + + // remove ILM + { + Request request = new Request("POST", "/" + fullMountedIndexName + "/_ilm/remove"); + Map responseMap = responseAsMap(client().performRequest(request)); + assertThat(responseMap.get("has_failures"), is(false)); + } + // add cold-frozen + updateIndexSettings(index, Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policyFrozen)); + String partiallyMountedIndexName = SearchableSnapshotAction.PARTIAL_RESTORED_INDEX_PREFIX + fullMountedIndexName; + assertBusy(() -> { + logger.info("--> waiting for [{}] to exist...", partiallyMountedIndexName); + assertTrue(indexExists(partiallyMountedIndexName)); + }, 30, TimeUnit.SECONDS); + + assertBusy(() -> { + Step.StepKey stepKeyForIndex = getStepKeyForIndex(client(), partiallyMountedIndexName); + assertThat(stepKeyForIndex.phase(), is("frozen")); + assertThat(stepKeyForIndex.name(), is(PhaseCompleteStep.NAME)); + }, 30, TimeUnit.SECONDS); + + // Ensure the searchable snapshot is not deleted when the index was deleted because it was not created by this + // policy. 
We add the delete phase now to ensure that the index will not be deleted before we verify the above + // assertions + createPolicy( + client(), + policyFrozen, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + new Phase( + "frozen", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + ); + assertBusy(() -> { + logger.info("--> waiting for [{}] to be deleted...", partiallyMountedIndexName); + assertThat(indexExists(partiallyMountedIndexName), is(false)); + Request getSnaps = new Request("GET", "/_snapshot/" + snapshotRepo + "/_all"); + Map responseMap = responseAsMap(client().performRequest(getSnaps)); + assertThat(((List>) responseMap.get("snapshots")).size(), equalTo(1)); + }, 30, TimeUnit.SECONDS); + } + + @SuppressWarnings("unchecked") + public void testResumingSearchableSnapshotFromPartialToFull() throws Exception { + String index = "myindex-" + randomAlphaOfLength(4).toLowerCase(Locale.ROOT); + createSnapshotRepo(client(), snapshotRepo, randomBoolean()); + var policyCold = "policy-cold"; + createPolicy( + client(), + policyCold, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + null, + null + ); + var policyColdFrozen = "policy-cold-frozen"; + createPolicy( + client(), + policyColdFrozen, + + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + new Phase( + "frozen", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + null + ); + + createIndex(index, 
Settings.EMPTY); + ensureGreen(index); + indexDocument(client(), index, true); + + // enable ILM after we indexed a document as otherwise ILM might sometimes run so fast the indexDocument call will fail with + // `index_not_found_exception` + updateIndexSettings(index, Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policyColdFrozen)); + + final String fullMountedIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + index; + final String partialMountedIndexName = SearchableSnapshotAction.PARTIAL_RESTORED_INDEX_PREFIX + fullMountedIndexName; + + assertBusy(() -> { + logger.info("--> waiting for [{}] to exist...", partialMountedIndexName); + assertTrue(indexExists(partialMountedIndexName)); + }, 30, TimeUnit.SECONDS); + + assertBusy(() -> { + Step.StepKey stepKeyForIndex = getStepKeyForIndex(client(), partialMountedIndexName); + assertThat(stepKeyForIndex.phase(), is("frozen")); + assertThat(stepKeyForIndex.name(), is(PhaseCompleteStep.NAME)); + }, 30, TimeUnit.SECONDS); + + // remove ILM from the partially mounted searchable snapshot + { + Request request = new Request("POST", "/" + partialMountedIndexName + "/_ilm/remove"); + Map responseMap = responseAsMap(client().performRequest(request)); + assertThat(responseMap.get("has_failures"), is(false)); + } + // add a policy that will only include the fully mounted searchable snapshot + updateIndexSettings(index, Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policyCold)); + String restoredPartiallyMountedIndexName = SearchableSnapshotAction.FULL_RESTORED_INDEX_PREFIX + partialMountedIndexName; + assertBusy(() -> { + logger.info("--> waiting for [{}] to exist...", restoredPartiallyMountedIndexName); + assertTrue(indexExists(restoredPartiallyMountedIndexName)); + }, 30, TimeUnit.SECONDS); + + assertBusy(() -> { + Step.StepKey stepKeyForIndex = getStepKeyForIndex(client(), restoredPartiallyMountedIndexName); + assertThat(stepKeyForIndex.phase(), is("cold")); + 
assertThat(stepKeyForIndex.name(), is(PhaseCompleteStep.NAME)); + }, 30, TimeUnit.SECONDS); + + // Ensure the searchable snapshot is not deleted when the index was deleted because it was not created by this + // policy. We add the delete phase now to ensure that the index will not be deleted before we verify the above + // assertions + createPolicy( + client(), + policyCold, + null, + null, + new Phase( + "cold", + TimeValue.ZERO, + singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + ), + null, + new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + ); + assertBusy(() -> { + logger.info("--> waiting for [{}] to be deleted...", restoredPartiallyMountedIndexName); + assertThat(indexExists(restoredPartiallyMountedIndexName), is(false)); + Request getSnaps = new Request("GET", "/_snapshot/" + snapshotRepo + "/_all"); + Map responseMap = responseAsMap(client().performRequest(getSnaps)); + assertThat(((List>) responseMap.get("snapshots")).size(), equalTo(1)); + }, 30, TimeUnit.SECONDS); + } + public void testSecondSearchableSnapshotUsingDifferentRepoThrows() throws Exception { String secondRepo = randomAlphaOfLengthBetween(10, 20); createSnapshotRepo(client(), snapshotRepo, randomBoolean()); From 5c3d118031dea20ef2e121da6d319c63a2ddb724 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 6 Dec 2023 08:15:18 -0800 Subject: [PATCH 256/263] Unmute HeapAttack tests (#102942) This PR re-enables two more heap attack tests. I have run more than 100 iterations with these tests without hitting any failures. 
--- .../elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java index 2cc13117a299f..37f2c86dbc251 100644 --- a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java @@ -113,7 +113,6 @@ public void testGroupOnSomeLongs() throws IOException { /** * This groups on 5000 columns which used to throw a {@link StackOverflowError}. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100640") public void testGroupOnManyLongs() throws IOException { initManyLongs(); Map map = XContentHelper.convertToMap( @@ -182,7 +181,6 @@ private Response concat(int evals) throws IOException { /** * Returns many moderately long strings. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100678") public void testManyConcat() throws IOException { initManyLongs(); Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(manyConcat(300).getEntity()), false); From c183b92585862a2d790fe6210dcf27a1fe4b30f0 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 6 Dec 2023 08:27:39 -0800 Subject: [PATCH 257/263] Fast path for reading single doc with ordinals (#102902) This optimization is added for enrich lookups, which are likely to match a single document. The change decreases the latency of the enrich operation in the nyc_taxis benchmark from 100ms to 70ms. When combined with #102901, it further reduces the latency to below 40ms, better than the previous performance before the regression. 
Relates #102625 --- docs/changelog/102902.yaml | 5 +++++ .../index/mapper/BlockDocValuesReader.java | 12 ++++++++++++ 2 files changed, 17 insertions(+) create mode 100644 docs/changelog/102902.yaml diff --git a/docs/changelog/102902.yaml b/docs/changelog/102902.yaml new file mode 100644 index 0000000000000..b33afdd35a603 --- /dev/null +++ b/docs/changelog/102902.yaml @@ -0,0 +1,5 @@ +pr: 102902 +summary: Fast path for reading single doc with ordinals +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java index 11e57e030dfe7..2160f52cbec02 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java @@ -555,8 +555,20 @@ private static class SingletonOrdinals extends BlockDocValuesReader { this.ordinals = ordinals; } + private BlockLoader.Block readSingleDoc(BlockFactory factory, int docId) throws IOException { + if (ordinals.advanceExact(docId)) { + BytesRef v = ordinals.lookupOrd(ordinals.ordValue()); + return factory.constantBytes(v); + } else { + return factory.constantNulls(); + } + } + @Override public BlockLoader.Block read(BlockFactory factory, Docs docs) throws IOException { + if (docs.count() == 1) { + return readSingleDoc(factory, docs.get(0)); + } try (BlockLoader.SingletonOrdinalsBuilder builder = factory.singletonOrdinalsBuilder(ordinals, docs.count())) { for (int i = 0; i < docs.count(); i++) { int doc = docs.get(i); From be1277a769c13156caa1ede1c3a1d46f536ab947 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 6 Dec 2023 09:14:39 -0800 Subject: [PATCH 258/263] Collect warnings in compute service (#103031) We have implemented #99927 in DriverRunner. However, we also need to implement this in ComputeService, where we spawn multiple requests to avoid losing response headers. 
Relates #99927 Closes #100163 Closes #102982 Closes #102871 Closes #103028 --- docs/changelog/103031.yaml | 9 ++ .../compute/operator/DriverRunner.java | 31 +------ .../operator/ResponseHeadersCollector.java | 60 +++++++++++++ .../ResponseHeadersCollectorTests.java | 72 ++++++++++++++++ .../src/main/resources/ip.csv-spec | 3 +- .../xpack/esql/action/WarningsIT.java | 85 +++++++++++++++++++ .../xpack/esql/plugin/ComputeService.java | 14 ++- 7 files changed, 241 insertions(+), 33 deletions(-) create mode 100644 docs/changelog/103031.yaml create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ResponseHeadersCollector.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResponseHeadersCollectorTests.java create mode 100644 x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java diff --git a/docs/changelog/103031.yaml b/docs/changelog/103031.yaml new file mode 100644 index 0000000000000..f63094139f5ca --- /dev/null +++ b/docs/changelog/103031.yaml @@ -0,0 +1,9 @@ +pr: 103031 +summary: Collect warnings in compute service +area: ES|QL +type: bug +issues: + - 100163 + - 103028 + - 102871 + - 102982 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java index 4f16a615572b7..5de017fbd279e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverRunner.java @@ -9,16 +9,11 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; import 
org.elasticsearch.tasks.TaskCancelledException; -import java.util.HashMap; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.atomic.AtomicReference; /** @@ -41,11 +36,10 @@ public DriverRunner(ThreadContext threadContext) { */ public void runToCompletion(List drivers, ActionListener listener) { AtomicReference failure = new AtomicReference<>(); - AtomicArray>> responseHeaders = new AtomicArray<>(drivers.size()); + var responseHeadersCollector = new ResponseHeadersCollector(threadContext); CountDown counter = new CountDown(drivers.size()); for (int i = 0; i < drivers.size(); i++) { Driver driver = drivers.get(i); - int driverIndex = i; ActionListener driverListener = new ActionListener<>() { @Override public void onResponse(Void unused) { @@ -80,9 +74,9 @@ public void onFailure(Exception e) { } private void done() { - responseHeaders.setOnce(driverIndex, threadContext.getResponseHeaders()); + responseHeadersCollector.collect(); if (counter.countDown()) { - mergeResponseHeaders(responseHeaders); + responseHeadersCollector.finish(); Exception error = failure.get(); if (error != null) { listener.onFailure(error); @@ -96,23 +90,4 @@ private void done() { start(driver, driverListener); } } - - private void mergeResponseHeaders(AtomicArray>> responseHeaders) { - final Map> merged = new HashMap<>(); - for (int i = 0; i < responseHeaders.length(); i++) { - final Map> resp = responseHeaders.get(i); - if (resp == null || resp.isEmpty()) { - continue; - } - for (Map.Entry> e : resp.entrySet()) { - // Use LinkedHashSet to retain the order of the values - merged.computeIfAbsent(e.getKey(), k -> new LinkedHashSet<>(e.getValue().size())).addAll(e.getValue()); - } - } - for (Map.Entry> e : merged.entrySet()) { - for (String v : e.getValue()) { - threadContext.addResponseHeader(e.getKey(), v); - } - } - } } diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ResponseHeadersCollector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ResponseHeadersCollector.java new file mode 100644 index 0000000000000..8f40664be74d4 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ResponseHeadersCollector.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; + +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; + +/** + * A helper class that can be used to collect and merge response headers from multiple child requests. + */ +public final class ResponseHeadersCollector { + private final ThreadContext threadContext; + private final Queue>> collected = ConcurrentCollections.newQueue(); + + public ResponseHeadersCollector(ThreadContext threadContext) { + this.threadContext = threadContext; + } + + /** + * Called when a child request is completed to collect the response headers of the responding thread + */ + public void collect() { + Map> responseHeaders = threadContext.getResponseHeaders(); + if (responseHeaders.isEmpty() == false) { + collected.add(responseHeaders); + } + } + + /** + * Called when all child requests are completed. This will merge all collected response headers + * from the child requests and restore to the current thread. 
+ */ + public void finish() { + final Map> merged = new HashMap<>(); + Map> resp; + while ((resp = collected.poll()) != null) { + for (Map.Entry> e : resp.entrySet()) { + // Use LinkedHashSet to retain the order of the values + merged.computeIfAbsent(e.getKey(), k -> new LinkedHashSet<>(e.getValue().size())).addAll(e.getValue()); + } + } + for (Map.Entry> e : merged.entrySet()) { + for (String v : e.getValue()) { + threadContext.addResponseHeader(e.getKey(), v); + } + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResponseHeadersCollectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResponseHeadersCollectorTests.java new file mode 100644 index 0000000000000..b09372f3a962c --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResponseHeadersCollectorTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import org.elasticsearch.threadpool.TestThreadPool; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.equalTo; + +public class ResponseHeadersCollectorTests extends ESTestCase { + + public void testCollect() { + int numThreads = randomIntBetween(1, 10); + TestThreadPool threadPool = new TestThreadPool( + getTestClass().getSimpleName(), + new FixedExecutorBuilder(Settings.EMPTY, "test", numThreads, 1024, "test", EsExecutors.TaskTrackingConfig.DEFAULT) + ); + Set expectedWarnings = new HashSet<>(); + try { + ThreadContext threadContext = threadPool.getThreadContext(); + var collector = new ResponseHeadersCollector(threadContext); + PlainActionFuture future = new PlainActionFuture<>(); + Runnable mergeAndVerify = () -> { + collector.finish(); + List actualWarnings = threadContext.getResponseHeaders().getOrDefault("Warnings", List.of()); + assertThat(Sets.newHashSet(actualWarnings), equalTo(expectedWarnings)); + }; + try (RefCountingListener refs = new RefCountingListener(ActionListener.runAfter(future, mergeAndVerify))) { + CyclicBarrier barrier = new CyclicBarrier(numThreads); + for (int i = 0; i < numThreads; i++) { + String warning = "warning-" + i; + expectedWarnings.add(warning); + 
ActionListener listener = ActionListener.runBefore(refs.acquire(), collector::collect); + threadPool.schedule(new ActionRunnable<>(listener) { + @Override + protected void doRun() throws Exception { + barrier.await(30, TimeUnit.SECONDS); + try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { + threadContext.addResponseHeader("Warnings", warning); + listener.onResponse(null); + } + } + }, TimeValue.timeValueNanos(between(0, 1000_000)), threadPool.executor("test")); + } + } + future.actionGet(TimeValue.timeValueSeconds(30)); + } finally { + terminate(threadPool); + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index e0167ce451e80..02e9db6ededf1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -257,8 +257,7 @@ eth1 |alpha |::1 |::1 eth0 |beta |127.0.0.1 |::1 ; -// AwaitsFix: https://github.com/elastic/elasticsearch/issues/103028 -pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only]-Ignore +pushDownIPWithComparision#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true warning:Line 1:20: evaluation of [ip1 > to_ip(\"127.0.0.1\")] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java new file mode 100644 index 0000000000000..12897979a47e0 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") +public class WarningsIT extends AbstractEsqlIntegTestCase { + + public void testCollectWarnings() { + final String node1, node2; + if (randomBoolean()) { + internalCluster().ensureAtLeastNumDataNodes(2); + node1 = randomDataNode().getName(); + node2 = randomValueOtherThan(node1, () -> randomDataNode().getName()); + } else { + node1 = randomDataNode().getName(); + node2 = randomDataNode().getName(); + } + + int numDocs1 = randomIntBetween(1, 15); + assertAcked( + client().admin() + .indices() + .prepareCreate("index-1") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node1)) + .setMapping("host", "type=keyword") + ); + for (int i = 0; i < numDocs1; i++) { + client().prepareIndex("index-1").setSource("host", "192." + i).get(); + } + int numDocs2 = randomIntBetween(1, 15); + assertAcked( + client().admin() + .indices() + .prepareCreate("index-2") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node2)) + .setMapping("host", "type=keyword") + ); + for (int i = 0; i < numDocs2; i++) { + client().prepareIndex("index-2").setSource("host", "10." 
+ i).get(); + } + + DiscoveryNode coordinator = randomFrom(clusterService().state().nodes().stream().toList()); + client().admin().indices().prepareRefresh("index-1", "index-2").get(); + + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM index-* | EVAL ip = to_ip(host) | STATS s = COUNT(*) by ip | KEEP ip | LIMIT 100"); + request.pragmas(randomPragmas()); + PlainActionFuture future = new PlainActionFuture<>(); + client(coordinator.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.runBefore(future, () -> { + var threadpool = internalCluster().getInstance(TransportService.class, coordinator.getName()).getThreadPool(); + Map> responseHeaders = threadpool.getThreadContext().getResponseHeaders(); + List warnings = responseHeaders.getOrDefault("Warning", List.of()) + .stream() + .filter(w -> w.contains("is not an IP string literal")) + .toList(); + int expectedWarnings = Math.min(20, numDocs1 + numDocs2); + // we cap the number of warnings per node + assertThat(warnings.size(), greaterThanOrEqualTo(expectedWarnings)); + })); + future.actionGet(30, TimeUnit.SECONDS).close(); + } + + private DiscoveryNode randomDataNode() { + return randomFrom(clusterService().state().nodes().getDataNodes().values()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index dd5ae00294ed0..b7b31868d65e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -32,6 +32,7 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverTaskRunner; +import org.elasticsearch.compute.operator.ResponseHeadersCollector; import 
org.elasticsearch.compute.operator.exchange.ExchangeResponse; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; @@ -162,6 +163,8 @@ public void execute( LOGGER.debug("Sending data node plan\n{}\n with filter [{}]", dataNodePlan, requestFilter); + final var responseHeadersCollector = new ResponseHeadersCollector(transportService.getThreadPool().getThreadContext()); + listener = ActionListener.runBefore(listener, responseHeadersCollector::finish); String[] originalIndices = PlannerUtils.planOriginalIndices(physicalPlan); computeTargetNodes( rootTask, @@ -193,6 +196,7 @@ public void execute( computeContext, coordinatorPlan, cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(driverProfiles -> { + responseHeadersCollector.collect(); if (configuration.profile()) { collectedProfiles.addAll(driverProfiles); } @@ -208,6 +212,7 @@ public void execute( exchangeSource, targetNodes, () -> cancelOnFailure(rootTask, cancelled, requestRefs.acquire()).map(response -> { + responseHeadersCollector.collect(); if (configuration.profile()) { collectedProfiles.addAll(response.profiles); } @@ -501,9 +506,12 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { // don't return until all pages are fetched exchangeSink.addCompletionListener( - ActionListener.releaseAfter( - listener.map(nullValue -> new DataNodeResponse(driverProfiles)), - () -> exchangeService.finishSinkHandler(sessionId, null) + ContextPreservingActionListener.wrapPreservingContext( + ActionListener.releaseAfter( + listener.map(nullValue -> new DataNodeResponse(driverProfiles)), + () -> exchangeService.finishSinkHandler(sessionId, null) + ), + transportService.getThreadPool().getThreadContext() ) ); }, e -> { From 84f72797f54175fa03c533998442cc973d7950c6 Mon Sep 17 00:00:00 2001 From: Armin Braun 
Date: Wed, 6 Dec 2023 19:10:00 +0100 Subject: [PATCH 259/263] Fix search response leaks in EQL tests (#103068) Fixing all EQL tests --- .../assembler/ImplicitTiebreakerTests.java | 3 +- .../assembler/SequenceSpecTests.java | 3 +- .../execution/sample/CircuitBreakerTests.java | 25 +++---- .../sequence/CircuitBreakerTests.java | 70 ++++++++++--------- 4 files changed, 51 insertions(+), 50 deletions(-) diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java index e1a70dffef79a..85a34d7b6a943 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java @@ -84,8 +84,7 @@ public void query(QueryRequest r, ActionListener l) { ); SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - SearchResponse s = new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY); - l.onResponse(s); + ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); } @Override diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java index 87fd105ddf56f..336526a1153a5 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java @@ -222,8 +222,7 @@ public void query(QueryRequest r, ActionListener l) { 0.0f ); 
SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - SearchResponse s = new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY); - l.onResponse(s); + ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); } @Override diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java index bac694996526d..e787505f7dfe3 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java @@ -224,19 +224,20 @@ void handleSearchRequest(ActionListener l) { ); SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - SearchResponse s = new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY); - l.onResponse(s); + ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); } @Override @@ -451,7 +450,7 @@ void handleSearchRequest(ActionListener 0); // at this point the algorithm already started adding up to memory usage } - listener.onResponse((Response) response); + ActionListener.respondAndRelease(listener, (Response) response); } } @@ -479,18 +478,20 @@ void handleSearchRequest(ActionListener 0); // at this point the algorithm already started adding up to memory usage ShardSearchFailure[] failures = new ShardSearchFailure[] { @@ -504,28 +505,29 @@ void handleSearchRequest(ActionListener Date: Wed, 6 Dec 2023 12:18:47 -0600 Subject: [PATCH 260/263] Update IronBank docker image base to ubi:9.3 (#102721) --- 
distribution/docker/src/docker/Dockerfile | 2 +- .../docker/src/docker/iron_bank/hardening_manifest.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index 8fac93211d82b..32f35b05015b9 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -22,7 +22,7 @@ <% if (docker_base == 'iron_bank') { %> ARG BASE_REGISTRY=registry1.dso.mil ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9 -ARG BASE_TAG=9.2 +ARG BASE_TAG=9.3 <% } %> ################################################################################ diff --git a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml index 7152f6d18f1d2..38ce16a413af2 100644 --- a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml +++ b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml @@ -14,7 +14,7 @@ tags: # Build args passed to Dockerfile ARGs args: BASE_IMAGE: "redhat/ubi/ubi9" - BASE_TAG: "9.2" + BASE_TAG: "9.3" # Docker image labels labels: From 7413e4169d128c3c6004e79ee9dd3bb59acd74f3 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Wed, 6 Dec 2023 19:33:52 +0100 Subject: [PATCH 261/263] Fix headers check in SingleValueQueryTests (#103074) If the generated docs have no values at all for the first documents which are checked, don't expect Warnings either. 
Closes #102997 --- .../xpack/esql/querydsl/query/SingleValueQueryTests.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 4322e5fbac2ef..f5fc643d98fe6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -156,15 +156,18 @@ private void runCase(List> fieldValues, int count, Integer docsStar int expected = 0; int min = docsStart != null ? docsStart : 0; int max = docsStop != null ? docsStop : fieldValues.size(); + int valuesCount = 0; for (int i = min; i < max; i++) { - if (fieldValues.get(i).size() == 1) { + int mvCount = fieldValues.get(i).size(); + if (mvCount == 1) { expected++; } + valuesCount += mvCount; } assertThat(count, equalTo(expected)); // query's count runs against the full set, not just min-to-max - if (fieldValues.stream().anyMatch(x -> x.size() > 1)) { + if (valuesCount > 0 && fieldValues.stream().anyMatch(x -> x.size() > 1)) { assertWarnings( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", "Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value" From 48144ba1589d0a471dc854d156436255d4843346 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 6 Dec 2023 20:11:47 +0100 Subject: [PATCH 262/263] Fix SearchResponse reference count leaks in ML module (#103009) Fixing all kinds of leaks in both ml prod and test code. Added a new utility for a very common operation in tests that I'm planning on replacing other use sites with in a follow up. 
--- .../search/SearchResponseUtils.java | 23 ++++ .../xpack/core/ClientHelperTests.java | 4 +- .../ClassificationHousePricingIT.java | 60 ++++++----- .../ml/integration/DatafeedWithAggsIT.java | 7 +- .../ml/integration/DeleteExpiredDataIT.java | 31 +++--- .../integration/RunDataFrameAnalyticsIT.java | 12 ++- .../license/MachineLearningLicensingIT.java | 2 +- .../BucketCorrelationAggregationIT.java | 64 ++++++----- .../CategorizeTextAggregationIT.java | 100 ++++++++++-------- .../integration/DataFrameAnalyticsCRUDIT.java | 19 ++-- .../xpack/ml/integration/DatafeedCcsIT.java | 6 +- .../xpack/ml/integration/IndexLayoutIT.java | 6 +- .../integration/JobStorageDeletionTaskIT.java | 10 +- .../ml/integration/TrainedModelCRUDIT.java | 11 +- .../DatafeedDelayedDataDetector.java | 20 ++-- .../AbstractAggregationDataExtractor.java | 12 ++- .../CompositeAggregationDataExtractor.java | 34 ++++-- .../chunked/ChunkedDataExtractor.java | 70 +++++++----- .../extractor/scroll/ScrollDataExtractor.java | 60 +++++++---- .../extractor/DataFrameDataExtractor.java | 24 +++-- .../dataframe/inference/InferenceRunner.java | 31 +++--- .../process/AnalyticsProcessManager.java | 6 +- .../process/NativeAnalyticsProcess.java | 14 ++- .../TrainTestSplitterFactory.java | 48 +++++---- .../ChunkedTrainedModelRestorer.java | 84 ++++++++------- .../job/persistence/JobResultsPersister.java | 12 ++- .../job/persistence/JobResultsProvider.java | 70 ++++++------ .../ml/job/persistence/StateStreamer.java | 34 +++--- .../ml/process/IndexingStateProcessor.java | 10 +- .../persistence/BatchedDocumentsIterator.java | 8 +- .../persistence/ResultsPersisterService.java | 5 +- .../SearchAfterDocumentsIterator.java | 10 +- .../input/search/ExecutableSearchInput.java | 58 +++++----- .../search/ExecutableSearchTransform.java | 16 +-- 34 files changed, 574 insertions(+), 407 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java diff --git 
a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java new file mode 100644 index 0000000000000..e61b89fcff42c --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.search; + +import org.elasticsearch.action.search.SearchRequestBuilder; + +public enum SearchResponseUtils { + ; + + public static long getTotalHitsValue(SearchRequestBuilder request) { + var resp = request.get(); + try { + return resp.getHits().getTotalHits().value; + } finally { + resp.decRef(); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index 0f3a58350c36a..99826b5537258 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -340,7 +340,7 @@ private void assertExecutionWithOrigin(Map storedHeaders, Client assertThat(headers, not(hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything"))); return client.search(new SearchRequest()).actionGet(); - }); + }).decRef(); } /** @@ -356,7 +356,7 @@ public void assertRunAsExecution(Map storedHeaders, Consumer destDoc = getDestDoc(config, hit); - assertNotNull(destDoc); - Map resultsObject = getFieldValue(destDoc, "ml"); - assertThat(resultsObject.containsKey(predictionField), is(true)); - String predictionValue = 
(String) resultsObject.get(predictionField); - assertNotNull(predictionValue); - assertThat(resultsObject.containsKey("feature_importance"), is(true)); - @SuppressWarnings("unchecked") - List> importanceArray = (List>) resultsObject.get("feature_importance"); - assertThat( - Strings.format(str, modelId, numberTrees) + predictionValue + hyperparameters + modelDefinition, - importanceArray, - hasSize(greaterThan(0)) - ); + try { + // obtain addition information for investigation of #90599 + String modelId = getModelId(jobId); + TrainedModelMetadata modelMetadata = getModelMetadata(modelId); + assertThat(modelMetadata.getHyperparameters().size(), greaterThan(0)); + StringBuilder hyperparameters = new StringBuilder(); // used to investigate #90019 + for (Hyperparameters hyperparameter : modelMetadata.getHyperparameters()) { + hyperparameters.append(hyperparameter.hyperparameterName).append(": ").append(hyperparameter.value).append("\n"); + } + TrainedModelDefinition modelDefinition = getModelDefinition(modelId); + Ensemble ensemble = (Ensemble) modelDefinition.getTrainedModel(); + int numberTrees = ensemble.getModels().size(); + String str = "Failure: failed for modelId %s numberTrees %d\n"; + for (SearchHit hit : sourceData.getHits()) { + Map destDoc = getDestDoc(config, hit); + assertNotNull(destDoc); + Map resultsObject = getFieldValue(destDoc, "ml"); + assertThat(resultsObject.containsKey(predictionField), is(true)); + String predictionValue = (String) resultsObject.get(predictionField); + assertNotNull(predictionValue); + assertThat(resultsObject.containsKey("feature_importance"), is(true)); + @SuppressWarnings("unchecked") + List> importanceArray = (List>) resultsObject.get("feature_importance"); + assertThat( + Strings.format(str, modelId, numberTrees) + predictionValue + hyperparameters + modelDefinition, + importanceArray, + hasSize(greaterThan(0)) + ); + } + } finally { + sourceData.decRef(); } - } static void indexData(String sourceIndex) { diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java index 9773a4d3b3d82..b1b645c224e34 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java @@ -35,6 +35,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -163,7 +164,7 @@ private void testDfWithAggs(AggregatorFactories.Builder aggs, Detector.Builder d bucket.getEventCount() ); // Confirm that it's possible to search for the same buckets by @timestamp - proves that @timestamp works as a field alias - assertThat( + assertHitCount( prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).setQuery( QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("job_id", jobId)) @@ -171,8 +172,8 @@ private void testDfWithAggs(AggregatorFactories.Builder aggs, Detector.Builder d .filter( QueryBuilders.rangeQuery("@timestamp").gte(bucket.getTimestamp().getTime()).lte(bucket.getTimestamp().getTime()) ) - ).setTrackTotalHits(true).get().getHits().getTotalHits().value, - equalTo(1L) + ).setTrackTotalHits(true), + 1 ); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index cf73b5a4a7544..00fdaa348409a 100644 --- 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -268,14 +269,13 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw retainAllSnapshots("snapshots-retention-with-retain"); - long totalModelSizeStatsBeforeDelete = prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) - .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) - .get() - .getHits() - .getTotalHits().value; - long totalNotificationsCountBeforeDelete = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get() - .getHits() - .getTotalHits().value; + long totalModelSizeStatsBeforeDelete = SearchResponseUtils.getTotalHitsValue( + prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) + ); + long totalNotificationsCountBeforeDelete = SearchResponseUtils.getTotalHitsValue( + prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) + ); assertThat(totalModelSizeStatsBeforeDelete, greaterThan(0L)); assertThat(totalNotificationsCountBeforeDelete, greaterThan(0L)); @@ -319,14 +319,13 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertThat(getRecords("results-and-snapshots-retention").size(), equalTo(0)); assertThat(getModelSnapshots("results-and-snapshots-retention").size(), equalTo(1)); - long totalModelSizeStatsAfterDelete = 
prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) - .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) - .get() - .getHits() - .getTotalHits().value; - long totalNotificationsCountAfterDelete = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get() - .getHits() - .getTotalHits().value; + long totalModelSizeStatsAfterDelete = SearchResponseUtils.getTotalHitsValue( + prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) + ); + long totalNotificationsCountAfterDelete = SearchResponseUtils.getTotalHitsValue( + prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) + ); assertThat(totalModelSizeStatsAfterDelete, equalTo(totalModelSizeStatsBeforeDelete)); assertThat(totalNotificationsCountAfterDelete, greaterThanOrEqualTo(totalNotificationsCountBeforeDelete)); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 2ab5ecb00aa00..8fbad7ccd3877 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetDataFrameAnalyticsStatsAction; import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; @@ -396,11 +397,12 @@ public void 
testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { if (searchResponse.getHits().getTotalHits().value == docCount) { - searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) - .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) - .get(); - logger.debug("We stopped during analysis: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); - assertThat(searchResponse.getHits().getTotalHits().value, lessThan((long) docCount)); + long seenCount = SearchResponseUtils.getTotalHitsValue( + prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) + .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) + ); + logger.debug("We stopped during analysis: [{}] < [{}]", seenCount, docCount); + assertThat(seenCount, lessThan((long) docCount)); } else { logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java index 81ae60ecfa9ae..a98dfa223b8ae 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java @@ -756,7 +756,7 @@ public void testInferenceAggRestricted() { SearchRequest search = new SearchRequest(index); search.source().aggregation(termsAgg); - client().search(search).actionGet(); + client().search(search).actionGet().decRef(); // Pick a license that does not allow machine learning License.OperationMode mode = randomInvalidLicenseType(); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index fc35c8491094e..c15750de3b336 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -31,6 +30,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Stream; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.closeTo; public class BucketCorrelationAggregationIT extends MlSingleNodeTestCase { @@ -71,34 +71,42 @@ public void testCountCorrelation() { AtomicLong counter = new AtomicLong(); double[] steps = Stream.generate(() -> counter.getAndAdd(2L)).limit(50).mapToDouble(l -> (double) l).toArray(); - SearchResponse percentilesSearch = client().prepareSearch("data") - .addAggregation(AggregationBuilders.percentiles("percentiles").field("metric").percentiles(steps)) - .setSize(0) - .setTrackTotalHits(true) - .get(); - long totalHits = percentilesSearch.getHits().getTotalHits().value; - Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); - Tuple aggs = buildRangeAggAndSetExpectations( - percentiles, - steps, - totalHits, - "metric" + assertResponse( + client().prepareSearch("data") + 
.addAggregation(AggregationBuilders.percentiles("percentiles").field("metric").percentiles(steps)) + .setSize(0) + .setTrackTotalHits(true), + percentilesSearch -> { + long totalHits = percentilesSearch.getHits().getTotalHits().value; + Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); + Tuple aggs = buildRangeAggAndSetExpectations( + percentiles, + steps, + totalHits, + "metric" + ); + + assertResponse( + client().prepareSearch("data") + .setSize(0) + .setTrackTotalHits(false) + .addAggregation( + AggregationBuilders.terms("buckets").field("term").subAggregation(aggs.v1()).subAggregation(aggs.v2()) + ), + countCorrelations -> { + + Terms terms = countCorrelations.getAggregations().get("buckets"); + Terms.Bucket catBucket = terms.getBucketByKey("cat"); + Terms.Bucket dogBucket = terms.getBucketByKey("dog"); + NumericMetricsAggregation.SingleValue approxCatCorrelation = catBucket.getAggregations().get("correlates"); + NumericMetricsAggregation.SingleValue approxDogCorrelation = dogBucket.getAggregations().get("correlates"); + + assertThat(approxCatCorrelation.value(), closeTo(catCorrelation, 0.1)); + assertThat(approxDogCorrelation.value(), closeTo(dogCorrelation, 0.1)); + } + ); + } ); - - SearchResponse countCorrelations = client().prepareSearch("data") - .setSize(0) - .setTrackTotalHits(false) - .addAggregation(AggregationBuilders.terms("buckets").field("term").subAggregation(aggs.v1()).subAggregation(aggs.v2())) - .get(); - - Terms terms = countCorrelations.getAggregations().get("buckets"); - Terms.Bucket catBucket = terms.getBucketByKey("cat"); - Terms.Bucket dogBucket = terms.getBucketByKey("dog"); - NumericMetricsAggregation.SingleValue approxCatCorrelation = catBucket.getAggregations().get("correlates"); - NumericMetricsAggregation.SingleValue approxDogCorrelation = dogBucket.getAggregations().get("correlates"); - - assertThat(approxCatCorrelation.value(), closeTo(catCorrelation, 0.1)); - 
assertThat(approxDogCorrelation.value(), closeTo(dogCorrelation, 0.1)); } private static Tuple buildRangeAggAndSetExpectations( diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java index d356fe49f9120..d4b29e3c92538 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -21,6 +20,7 @@ import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.Before; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -39,53 +39,69 @@ public void setupCluster() { } public void testAggregation() { - SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) - .setTrackTotalHits(false) - .addAggregation( - new CategorizeTextAggregationBuilder("categorize", "msg").subAggregation(AggregationBuilders.max("max").field("time")) - .subAggregation(AggregationBuilders.min("min").field("time")) - ) - .get(); - - InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); - assertThat(agg.getBuckets(), hasSize(3)); - - assertCategorizationBucket(agg.getBuckets().get(0), "Node started", 3); - 
assertCategorizationBucket(agg.getBuckets().get(1), "Failed to shutdown error org.aaaa.bbbb.Cccc line caused by foo exception", 2); - assertCategorizationBucket(agg.getBuckets().get(2), "Node stopped", 1); + assertResponse( + prepareSearch(DATA_INDEX).setSize(0) + .setTrackTotalHits(false) + .addAggregation( + new CategorizeTextAggregationBuilder("categorize", "msg").subAggregation(AggregationBuilders.max("max").field("time")) + .subAggregation(AggregationBuilders.min("min").field("time")) + ), + response -> { + + InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); + assertThat(agg.getBuckets(), hasSize(3)); + + assertCategorizationBucket(agg.getBuckets().get(0), "Node started", 3); + assertCategorizationBucket( + agg.getBuckets().get(1), + "Failed to shutdown error org.aaaa.bbbb.Cccc line caused by foo exception", + 2 + ); + assertCategorizationBucket(agg.getBuckets().get(2), "Node stopped", 1); + } + ); } public void testAggregationWithOnlyOneBucket() { - SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) - .setTrackTotalHits(false) - .addAggregation( - new CategorizeTextAggregationBuilder("categorize", "msg").size(1) - .subAggregation(AggregationBuilders.max("max").field("time")) - .subAggregation(AggregationBuilders.min("min").field("time")) - ) - .get(); - InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); - assertThat(agg.getBuckets(), hasSize(1)); - - assertCategorizationBucket(agg.getBuckets().get(0), "Node started", 3); + assertResponse( + prepareSearch(DATA_INDEX).setSize(0) + .setTrackTotalHits(false) + .addAggregation( + new CategorizeTextAggregationBuilder("categorize", "msg").size(1) + .subAggregation(AggregationBuilders.max("max").field("time")) + .subAggregation(AggregationBuilders.min("min").field("time")) + ), + response -> { + InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); + assertThat(agg.getBuckets(), hasSize(1)); + 
assertCategorizationBucket(agg.getBuckets().get(0), "Node started", 3); + } + ); } public void testAggregationWithBroadCategories() { - SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) - .setTrackTotalHits(false) - .addAggregation( - // Overriding the similarity threshold to just 11% (default is 70%) results in the - // "Node started" and "Node stopped" messages being grouped in the same category - new CategorizeTextAggregationBuilder("categorize", "msg").setSimilarityThreshold(11) - .subAggregation(AggregationBuilders.max("max").field("time")) - .subAggregation(AggregationBuilders.min("min").field("time")) - ) - .get(); - InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); - assertThat(agg.getBuckets(), hasSize(2)); - - assertCategorizationBucket(agg.getBuckets().get(0), "Node", 4); - assertCategorizationBucket(agg.getBuckets().get(1), "Failed to shutdown error org.aaaa.bbbb.Cccc line caused by foo exception", 2); + assertResponse( + prepareSearch(DATA_INDEX).setSize(0) + .setTrackTotalHits(false) + .addAggregation( + // Overriding the similarity threshold to just 11% (default is 70%) results in the + // "Node started" and "Node stopped" messages being grouped in the same category + new CategorizeTextAggregationBuilder("categorize", "msg").setSimilarityThreshold(11) + .subAggregation(AggregationBuilders.max("max").field("time")) + .subAggregation(AggregationBuilders.min("min").field("time")) + ), + response -> { + InternalCategorizationAggregation agg = response.getAggregations().get("categorize"); + assertThat(agg.getBuckets(), hasSize(2)); + + assertCategorizationBucket(agg.getBuckets().get(0), "Node", 4); + assertCategorizationBucket( + agg.getBuckets().get(1), + "Failed to shutdown error org.aaaa.bbbb.Cccc line caused by foo exception", + 2 + ); + } + ); } private void assertCategorizationBucket(InternalCategorizationAggregation.Bucket bucket, String key, long docCount) { diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java index 12bc1a6019119..a8e97263647ea 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java @@ -25,6 +25,7 @@ import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -96,7 +97,7 @@ public void testDeleteConfigWithStateAndStats() throws InterruptedException { client().execute(DeleteDataFrameAnalyticsAction.INSTANCE, new DeleteDataFrameAnalyticsAction.Request(configId)).actionGet(); - assertThat( + assertHitCount( originSettingClient.prepareSearch(".ml-state-*") .setQuery( QueryBuilders.idsQuery() @@ -105,21 +106,15 @@ public void testDeleteConfigWithStateAndStats() throws InterruptedException { "data_frame_analytics-delete-config-with-state-and-stats-progress" ) ) - .setTrackTotalHits(true) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) + .setTrackTotalHits(true), + 0 ); - assertThat( + assertHitCount( originSettingClient.prepareSearch(".ml-stats-*") .setQuery(QueryBuilders.idsQuery().addIds("delete-config-with-state-and-stats_1", "delete-config-with-state-and-stats_2")) - .setTrackTotalHits(true) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) + .setTrackTotalHits(true), + 0 ); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java 
b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index a5a4103d0cb7a..b71ecd4858533 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -191,7 +191,11 @@ private boolean doesLocalAuditMessageExist(String message) { SearchResponse response = client(LOCAL_CLUSTER).prepareSearch(".ml-notifications*") .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); - return response.getHits().getTotalHits().value > 0; + try { + return response.getHits().getTotalHits().value > 0; + } finally { + response.decRef(); + } } catch (ElasticsearchException e) { return false; } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java index db88cb5dc266e..99052c771fb49 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/IndexLayoutIT.java @@ -26,6 +26,7 @@ import java.time.temporal.ChronoUnit; import java.util.Collections; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; @@ -140,10 +141,7 @@ public void testForceCloseDoesNotCreateState() throws Exception { arrayContaining(".ml-state-000001") ); - assertThat( - client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setTrackTotalHits(true).get().getHits().getTotalHits().value, - equalTo(0L) - ); + 
assertHitCount(client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setTrackTotalHits(true), 0); } } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java index aa8b29228b790..b7bd8fed3e83c 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java @@ -43,6 +43,7 @@ import java.util.HashSet; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -202,7 +203,7 @@ public void testDeleteDedicatedJobWithDataInShared() throws Exception { ); // Make sure all results referencing the dedicated job are gone - assertThat( + assertHitCount( prepareSearch().setIndices(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") .setIndicesOptions(IndicesOptions.lenientExpandOpenHidden()) .setTrackTotalHits(true) @@ -210,11 +211,8 @@ public void testDeleteDedicatedJobWithDataInShared() throws Exception { .setSource( SearchSourceBuilder.searchSource() .query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobIdDedicated))) - ) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) + ), + 0 ); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java index 9b1f4c856df85..3a08b56ed38a4 100644 --- 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelCRUDIT.java @@ -23,6 +23,7 @@ import java.util.Base64; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; @@ -102,15 +103,7 @@ public void testPutTrainedModelAndDefinition() { client().execute(DeleteTrainedModelAction.INSTANCE, new DeleteTrainedModelAction.Request(modelId)).actionGet(); - assertThat( - client().prepareSearch(InferenceIndexConstants.nativeDefinitionStore()) - .setTrackTotalHitsUpTo(1) - .setSize(0) - .get() - .getHits() - .getTotalHits().value, - equalTo(0L) - ); + assertHitCount(client().prepareSearch(InferenceIndexConstants.nativeDefinitionStore()).setTrackTotalHitsUpTo(1).setSize(0), 0); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java index 341746a097bb2..0374dbf8eb1fe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/delayeddatacheck/DatafeedDelayedDataDetector.java @@ -135,16 +135,20 @@ private Map checkCurrentBucketEventCount(long start, long end) { SearchRequest searchRequest = new SearchRequest(datafeedIndices).source(searchSourceBuilder).indicesOptions(indicesOptions); try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(ML_ORIGIN)) { SearchResponse response = client.execute(TransportSearchAction.TYPE, searchRequest).actionGet(); - List buckets = ((Histogram) 
response.getAggregations().get(DATE_BUCKETS)).getBuckets(); - Map hashMap = Maps.newMapWithExpectedSize(buckets.size()); - for (Histogram.Bucket bucket : buckets) { - long bucketTime = toHistogramKeyToEpoch(bucket.getKey()); - if (bucketTime < 0) { - throw new IllegalStateException("Histogram key [" + bucket.getKey() + "] cannot be converted to a timestamp"); + try { + List buckets = ((Histogram) response.getAggregations().get(DATE_BUCKETS)).getBuckets(); + Map hashMap = Maps.newMapWithExpectedSize(buckets.size()); + for (Histogram.Bucket bucket : buckets) { + long bucketTime = toHistogramKeyToEpoch(bucket.getKey()); + if (bucketTime < 0) { + throw new IllegalStateException("Histogram key [" + bucket.getKey() + "] cannot be converted to a timestamp"); + } + hashMap.put(bucketTime, bucket.getDocCount()); } - hashMap.put(bucketTime, bucket.getDocCount()); + return hashMap; + } finally { + response.decRef(); } - return hashMap; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java index 421581e2622ab..fd57419abaa83 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java @@ -122,10 +122,14 @@ private Aggregations search() { T searchRequest = buildSearchRequest(buildBaseSearchSource()); assert searchRequest.request().allowPartialSearchResults() == false; SearchResponse searchResponse = executeSearchRequest(searchRequest); - checkForSkippedClusters(searchResponse); - LOGGER.debug("[{}] Search response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - return 
validateAggs(searchResponse.getAggregations()); + try { + checkForSkippedClusters(searchResponse); + LOGGER.debug("[{}] Search response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + return validateAggs(searchResponse.getAggregations()); + } finally { + searchResponse.decRef(); + } } private void initAggregationProcessor(Aggregations aggs) throws IOException { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java index 859dd506a7712..d4bd75c92eb18 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java @@ -138,17 +138,21 @@ private Aggregations search() { searchSourceBuilder.aggregation(compositeAggregationBuilder); ActionRequestBuilder searchRequest = requestBuilder.build(searchSourceBuilder); SearchResponse searchResponse = executeSearchRequest(searchRequest); - LOGGER.trace(() -> "[" + context.jobId + "] Search composite response was obtained"); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - Aggregations aggregations = searchResponse.getAggregations(); - if (aggregations == null) { - return null; - } - CompositeAggregation compositeAgg = aggregations.get(compositeAggregationBuilder.getName()); - if (compositeAgg == null || compositeAgg.getBuckets().isEmpty()) { - return null; + try { + LOGGER.trace(() -> "[" + context.jobId + "] Search composite response was obtained"); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + Aggregations aggregations = searchResponse.getAggregations(); + if (aggregations == null) { + return null; + } + 
CompositeAggregation compositeAgg = aggregations.get(compositeAggregationBuilder.getName()); + if (compositeAgg == null || compositeAgg.getBuckets().isEmpty()) { + return null; + } + return aggregations; + } finally { + searchResponse.decRef(); } - return aggregations; } protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { @@ -158,7 +162,15 @@ protected SearchResponse executeSearchRequest(ActionRequestBuilder 0) { - Aggregations aggregations = searchResponse.getAggregations(); - Min min = aggregations.get(EARLIEST_TIME); - earliestTime = (long) min.value(); - Max max = aggregations.get(LATEST_TIME); - latestTime = (long) max.value(); + try { + LOGGER.debug("[{}] Scrolling Data summary response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + + long earliestTime = 0; + long latestTime = 0; + long totalHits = searchResponse.getHits().getTotalHits().value; + if (totalHits > 0) { + Aggregations aggregations = searchResponse.getAggregations(); + Min min = aggregations.get(EARLIEST_TIME); + earliestTime = (long) min.value(); + Max max = aggregations.get(LATEST_TIME); + latestTime = (long) max.value(); + } + return new ScrolledDataSummary(earliestTime, latestTime, totalHits); + } finally { + searchResponse.decRef(); } - return new ScrolledDataSummary(earliestTime, latestTime, totalHits); } private DataSummary newAggregatedDataSummary() { @@ -253,20 +265,24 @@ private DataSummary newAggregatedDataSummary() { ActionRequestBuilder searchRequestBuilder = dataExtractorFactory instanceof RollupDataExtractorFactory ? 
rollupRangeSearchRequest() : rangeSearchRequest(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); - LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - - Aggregations aggregations = searchResponse.getAggregations(); - // This can happen if all the indices the datafeed is searching are deleted after it started. - // Note that unlike the scrolled data summary method above we cannot check for this situation - // by checking for zero hits, because aggregations that work on rollups return zero hits even - // when they retrieve data. - if (aggregations == null) { - return AggregatedDataSummary.noDataSummary(context.histogramInterval); + try { + LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + + Aggregations aggregations = searchResponse.getAggregations(); + // This can happen if all the indices the datafeed is searching are deleted after it started. + // Note that unlike the scrolled data summary method above we cannot check for this situation + // by checking for zero hits, because aggregations that work on rollups return zero hits even + // when they retrieve data. 
+ if (aggregations == null) { + return AggregatedDataSummary.noDataSummary(context.histogramInterval); + } + Min min = aggregations.get(EARLIEST_TIME); + Max max = aggregations.get(LATEST_TIME); + return new AggregatedDataSummary(min.value(), max.value(), context.histogramInterval); + } finally { + searchResponse.decRef(); } - Min min = aggregations.get(EARLIEST_TIME); - Max max = aggregations.get(LATEST_TIME); - return new AggregatedDataSummary(min.value(), max.value(), context.histogramInterval); } private SearchSourceBuilder rangeSearchBuilder() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index c721462697b65..4cfcf6509faa0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -122,12 +122,15 @@ private Optional tryNextStream() throws IOException { protected InputStream initScroll(long startTimestamp) throws IOException { logger.debug("[{}] Initializing scroll with start time [{}]", context.jobId, startTimestamp); SearchResponse searchResponse = executeSearchRequest(buildSearchRequest(startTimestamp)); - logger.debug("[{}] Search response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - searchResponse = null; - return processAndConsumeSearchHits(hits); + try { + logger.debug("[{}] Search response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + scrollId = searchResponse.getScrollId(); + SearchHit hits[] = searchResponse.getHits().getHits(); + return processAndConsumeSearchHits(hits); + } 
finally { + searchResponse.decRef(); + } } protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { @@ -137,11 +140,17 @@ protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequest client, searchRequestBuilder::get ); + boolean success = false; try { checkForSkippedClusters(searchResponse); + success = true; } catch (ResourceNotFoundException e) { clearScrollLoggingExceptions(searchResponse.getScrollId()); throw e; + } finally { + if (success == false) { + searchResponse.decRef(); + } } return searchResponse; } @@ -213,23 +222,28 @@ private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOExcep private InputStream continueScroll() throws IOException { logger.debug("[{}] Continuing scroll with id [{}]", context.jobId, scrollId); - SearchResponse searchResponse; + SearchResponse searchResponse = null; try { - searchResponse = executeSearchScrollRequest(scrollId); - } catch (SearchPhaseExecutionException searchExecutionException) { - if (searchHasShardFailure) { - throw searchExecutionException; + try { + searchResponse = executeSearchScrollRequest(scrollId); + } catch (SearchPhaseExecutionException searchExecutionException) { + if (searchHasShardFailure) { + throw searchExecutionException; + } + logger.debug("[{}] search failed due to SearchPhaseExecutionException. Will attempt again with new scroll", context.jobId); + markScrollAsErrored(); + searchResponse = executeSearchRequest(buildSearchRequest(lastTimestamp == null ? context.start : lastTimestamp)); + } + logger.debug("[{}] Search response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + scrollId = searchResponse.getScrollId(); + SearchHit hits[] = searchResponse.getHits().getHits(); + return processAndConsumeSearchHits(hits); + } finally { + if (searchResponse != null) { + searchResponse.decRef(); } - logger.debug("[{}] search failed due to SearchPhaseExecutionException. 
Will attempt again with new scroll", context.jobId); - markScrollAsErrored(); - searchResponse = executeSearchRequest(buildSearchRequest(lastTimestamp == null ? context.start : lastTimestamp)); } - logger.debug("[{}] Search response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - searchResponse = null; - return processAndConsumeSearchHits(hits); } void markScrollAsErrored() { @@ -250,11 +264,17 @@ protected SearchResponse executeSearchScrollRequest(String scrollId) { client, () -> new SearchScrollRequestBuilder(client).setScroll(SCROLL_TIMEOUT).setScrollId(scrollId).get() ); + boolean success = false; try { checkForSkippedClusters(searchResponse); + success = true; } catch (ResourceNotFoundException e) { clearScrollLoggingExceptions(searchResponse.getScrollId()); throw e; + } finally { + if (success == false) { + searchResponse.decRef(); + } } return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index ab6ee250df5c6..6c3fb28fe2c83 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -175,14 +175,18 @@ private List tryRequestWithSearchResponse(Supplier request) // We've set allow_partial_search_results to false which means if something // goes wrong the request will throw. 
SearchResponse searchResponse = request.get(); - LOGGER.trace(() -> "[" + context.jobId + "] Search response was obtained"); + try { + LOGGER.trace(() -> "[" + context.jobId + "] Search response was obtained"); - List rows = processSearchResponse(searchResponse); + List rows = processSearchResponse(searchResponse); - // Request was successfully executed and processed so we can restore the flag to retry if a future failure occurs - hasPreviousSearchFailed = false; + // Request was successfully executed and processed so we can restore the flag to retry if a future failure occurs + hasPreviousSearchFailed = false; - return rows; + return rows; + } finally { + searchResponse.decRef(); + } } catch (Exception e) { if (hasPreviousSearchFailed) { throw e; @@ -370,9 +374,13 @@ public ExtractedFields getExtractedFields() { public DataSummary collectDataSummary() { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); - long rows = searchResponse.getHits().getTotalHits().value; - LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); - return new DataSummary(rows, organicFeatures.length + processedFeatures.length); + try { + long rows = searchResponse.getHits().getTotalHits().value; + LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); + return new DataSummary(rows, organicFeatures.length + processedFeatures.length); + } finally { + searchResponse.decRef(); + } } public void collectDataSummaryAsync(ActionListener dataSummaryActionListener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index cc59903436e2f..c9ce6e0d4e3c7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -155,21 +155,24 @@ private InferenceState restoreInferenceState() { client, () -> client.search(searchRequest).actionGet() ); - - Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; - Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); - if (lastIncrementalId != null) { - LOGGER.debug( - () -> format( - "[%s] Resuming inference; last incremental id [%s]; processed test doc count [%s]", - config.getId(), - lastIncrementalId, - processedTestDocCount - ) - ); + try { + Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); + long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); + if (lastIncrementalId != null) { + LOGGER.debug( + () -> format( + "[%s] Resuming inference; last incremental id [%s]; processed test doc count [%s]", + config.getId(), + lastIncrementalId, + processedTestDocCount + ) + ); + } + return new InferenceState(lastIncrementalId, processedTestDocCount); + } finally { + searchResponse.decRef(); } - return new InferenceState(lastIncrementalId, processedTestDocCount); } // Visible for testing diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java index de306b2ece1a2..d4c10e25a2ade 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java @@ -170,7 +170,11 @@ private boolean 
hasModelState(DataFrameAnalyticsConfig config) { .setFetchSource(false) .setQuery(QueryBuilders.idsQuery().addIds(config.getAnalysis().getStateDocIdPrefix(config.getId()) + "1")) .get(); - return searchResponse.getHits().getHits().length == 1; + try { + return searchResponse.getHits().getHits().length == 1; + } finally { + searchResponse.decRef(); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcess.java index a77280d7ba0c8..ed59c7f86fdd9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcess.java @@ -85,12 +85,16 @@ public void restoreState(Client client, String stateDocIdPrefix) throws IOExcept .setSize(1) .setQuery(QueryBuilders.idsQuery().addIds(stateDocIdPrefix + ++docNum)) .get(); - if (stateResponse.getHits().getHits().length == 0) { - break; + try { + if (stateResponse.getHits().getHits().length == 0) { + break; + } + SearchHit stateDoc = stateResponse.getHits().getAt(0); + logger.debug(() -> format("[%s] Restoring state document [%s]", config.jobId(), stateDoc.getId())); + StateToProcessWriterHelper.writeStateToStream(stateDoc.getSourceRef(), restoreStream); + } finally { + stateResponse.decRef(); } - SearchHit stateDoc = stateResponse.getHits().getAt(0); - logger.debug(() -> format("[%s] Restoring state document [%s]", config.jobId(), stateDoc.getId())); - StateToProcessWriterHelper.writeStateToStream(stateDoc.getSourceRef(), restoreStream); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index d3f33b91936d8..ebe4295f8efbf 
100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -64,13 +64,17 @@ private TrainTestSplitter createSingleClassSplitter(Regression regression) { client, searchRequestBuilder::get ); - return new SingleClassReservoirTrainTestSplitter( - fieldNames, - regression.getDependentVariable(), - regression.getTrainingPercent(), - regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value - ); + try { + return new SingleClassReservoirTrainTestSplitter( + fieldNames, + regression.getDependentVariable(), + regression.getTrainingPercent(), + regression.getRandomizeSeed(), + searchResponse.getHits().getTotalHits().value + ); + } finally { + searchResponse.decRef(); + } } catch (Exception e) { String msg = "[" + config.getId() + "] Error searching total number of training docs"; LOGGER.error(msg, e); @@ -96,20 +100,24 @@ private TrainTestSplitter createStratifiedSplitter(Classification classification client, searchRequestBuilder::get ); - Aggregations aggs = searchResponse.getAggregations(); - Terms terms = aggs.get(aggName); - Map classCounts = new HashMap<>(); - for (Terms.Bucket bucket : terms.getBuckets()) { - classCounts.put(String.valueOf(bucket.getKey()), bucket.getDocCount()); - } + try { + Aggregations aggs = searchResponse.getAggregations(); + Terms terms = aggs.get(aggName); + Map classCounts = new HashMap<>(); + for (Terms.Bucket bucket : terms.getBuckets()) { + classCounts.put(String.valueOf(bucket.getKey()), bucket.getDocCount()); + } - return new StratifiedTrainTestSplitter( - fieldNames, - classification.getDependentVariable(), - classCounts, - classification.getTrainingPercent(), - classification.getRandomizeSeed() - ); + return new StratifiedTrainTestSplitter( + fieldNames, + classification.getDependentVariable(), + classCounts, + 
classification.getTrainingPercent(), + classification.getRandomizeSeed() + ); + } finally { + searchResponse.decRef(); + } } catch (Exception e) { String msg = "[" + config.getId() + "] Dependent variable terms search failed"; LOGGER.error(msg, e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index 3ace40e0deb6b..0a34915083982 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -158,54 +158,58 @@ private void doSearch( SEARCH_RETRY_LIMIT, SEARCH_FAILURE_RETRY_WAIT_TIME ); - if (searchResponse.getHits().getHits().length == 0) { - errorConsumer.accept(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId))); - return; - } + try { + if (searchResponse.getHits().getHits().length == 0) { + errorConsumer.accept(new ResourceNotFoundException(Messages.getMessage(Messages.MODEL_DEFINITION_NOT_FOUND, modelId))); + return; + } - // Set lastNum to a non-zero to prevent an infinite loop of - // search after requests in the absolute worse case where - // it has all gone wrong. - // Docs are numbered 0..N. we must have seen at least - // this many docs so far. 
- int lastNum = numDocsWritten - 1; - for (SearchHit hit : searchResponse.getHits().getHits()) { - logger.debug(() -> format("[%s] Restoring model definition doc with id [%s]", modelId, hit.getId())); - try { - TrainedModelDefinitionDoc doc = parseModelDefinitionDocLenientlyFromSource( - hit.getSourceRef(), - modelId, - xContentRegistry - ); - lastNum = doc.getDocNum(); + // Set lastNum to a non-zero to prevent an infinite loop of + // search after requests in the absolute worse case where + // it has all gone wrong. + // Docs are numbered 0..N. we must have seen at least + // this many docs so far. + int lastNum = numDocsWritten - 1; + for (SearchHit hit : searchResponse.getHits().getHits()) { + logger.debug(() -> format("[%s] Restoring model definition doc with id [%s]", modelId, hit.getId())); + try { + TrainedModelDefinitionDoc doc = parseModelDefinitionDocLenientlyFromSource( + hit.getSourceRef(), + modelId, + xContentRegistry + ); + lastNum = doc.getDocNum(); - boolean continueSearching = modelConsumer.apply(doc); - if (continueSearching == false) { - // signal the search has finished early - successConsumer.accept(Boolean.FALSE); + boolean continueSearching = modelConsumer.apply(doc); + if (continueSearching == false) { + // signal the search has finished early + successConsumer.accept(Boolean.FALSE); + return; + } + + } catch (IOException e) { + logger.error(() -> "[" + modelId + "] error writing model definition", e); + errorConsumer.accept(e); return; } - - } catch (IOException e) { - logger.error(() -> "[" + modelId + "] error writing model definition", e); - errorConsumer.accept(e); - return; } - } - numDocsWritten += searchResponse.getHits().getHits().length; + numDocsWritten += searchResponse.getHits().getHits().length; - boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize + || 
searchResponse.getHits().getTotalHits().value == numDocsWritten; - if (endOfSearch) { - successConsumer.accept(Boolean.TRUE); - } else { - // search again with after - SearchHit lastHit = searchResponse.getHits().getAt(searchResponse.getHits().getHits().length - 1); - SearchRequestBuilder searchRequestBuilder = buildSearchBuilder(client, modelId, index, searchSize); - searchRequestBuilder.searchAfter(new Object[] { lastHit.getIndex(), lastNum }); - executorService.execute(() -> doSearch(searchRequestBuilder.request(), modelConsumer, successConsumer, errorConsumer)); + if (endOfSearch) { + successConsumer.accept(Boolean.TRUE); + } else { + // search again with after + SearchHit lastHit = searchResponse.getHits().getAt(searchResponse.getHits().getHits().length - 1); + SearchRequestBuilder searchRequestBuilder = buildSearchBuilder(client, modelId, index, searchSize); + searchRequestBuilder.searchAfter(new Object[] { lastHit.getIndex(), lastNum }); + executorService.execute(() -> doSearch(searchRequestBuilder.request(), modelConsumer, successConsumer, errorConsumer)); + } + } finally { + searchResponse.decRef(); } } catch (Exception e) { if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java index 761bfa16e66bb..6fbe16192a875 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersister.java @@ -328,10 +328,14 @@ public void persistQuantiles(Quantiles quantiles, Supplier shouldRetry) shouldRetry, retryMessage -> logger.debug("[{}] {} {}", jobId, quantilesDocId, retryMessage) ); - String indexOrAlias = searchResponse.getHits().getHits().length > 0 - ? 
searchResponse.getHits().getHits()[0].getIndex() - : AnomalyDetectorsIndex.jobStateIndexWriteAlias(); - + final String indexOrAlias; + try { + indexOrAlias = searchResponse.getHits().getHits().length > 0 + ? searchResponse.getHits().getHits()[0].getIndex() + : AnomalyDetectorsIndex.jobStateIndexWriteAlias(); + } finally { + searchResponse.decRef(); + } Persistable persistable = new Persistable(indexOrAlias, quantiles.getJobId(), quantiles, quantilesDocId); persistable.persist(shouldRetry, AnomalyDetectorsIndex.jobStateIndexWriteAlias().equals(indexOrAlias)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index d309ee2e5dc95..7b41f3e055874 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -1422,24 +1422,27 @@ public QueryPage modelPlot(String jobId, int from, int size) { .setTrackTotalHits(true) .get(); } - - List results = new ArrayList<>(); - - for (SearchHit hit : searchResponse.getHits().getHits()) { - BytesReference source = hit.getSourceRef(); - try ( - InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), stream) - ) { - ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null); - results.add(modelPlot); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse modelPlot", e); + try { + List results = new ArrayList<>(); + + for (SearchHit hit : searchResponse.getHits().getHits()) { + BytesReference source = hit.getSourceRef(); + try ( + InputStream stream = source.streamInput(); + XContentParser parser = 
XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), stream) + ) { + ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null); + results.add(modelPlot); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to parse modelPlot", e); + } } - } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + } finally { + searchResponse.decRef(); + } } public QueryPage categorizerStats(String jobId, int from, int size) { @@ -1456,24 +1459,27 @@ public QueryPage categorizerStats(String jobId, int from, int .setTrackTotalHits(true) .get(); } - - List results = new ArrayList<>(); - - for (SearchHit hit : searchResponse.getHits().getHits()) { - BytesReference source = hit.getSourceRef(); - try ( - InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), stream) - ) { - CategorizerStats categorizerStats = CategorizerStats.LENIENT_PARSER.apply(parser, null).build(); - results.add(categorizerStats); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to parse categorizerStats", e); + try { + List results = new ArrayList<>(); + + for (SearchHit hit : searchResponse.getHits().getHits()) { + BytesReference source = hit.getSourceRef(); + try ( + InputStream stream = source.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), stream) + ) { + CategorizerStats categorizerStats = CategorizerStats.LENIENT_PARSER.apply(parser, null).build(); + results.add(categorizerStats); + } 
catch (IOException e) { + throw new ElasticsearchParseException("failed to parse categorizerStats", e); + } } - } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + } finally { + searchResponse.decRef(); + } } /** diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java index 20d29c1f0a2de..d97f564e0d50a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamer.java @@ -76,17 +76,21 @@ public void restoreStateToStream(String jobId, ModelSnapshot modelSnapshot, Outp .setSize(1) .setQuery(QueryBuilders.idsQuery().addIds(stateDocId)) .get(); - if (stateResponse.getHits().getHits().length == 0) { - LOGGER.error( - "Expected {} documents for model state for {} snapshot {} but failed to find {}", - modelSnapshot.getSnapshotDocCount(), - jobId, - modelSnapshot.getSnapshotId(), - stateDocId - ); - break; + try { + if (stateResponse.getHits().getHits().length == 0) { + LOGGER.error( + "Expected {} documents for model state for {} snapshot {} but failed to find {}", + modelSnapshot.getSnapshotDocCount(), + jobId, + modelSnapshot.getSnapshotId(), + stateDocId + ); + break; + } + writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); + } finally { + stateResponse.decRef(); } - writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); } } @@ -108,10 +112,14 @@ public void restoreStateToStream(String jobId, ModelSnapshot modelSnapshot, Outp .setSize(1) .setQuery(QueryBuilders.idsQuery().addIds(docId)) .get(); - if (stateResponse.getHits().getHits().length == 0) { - break; + try { + 
if (stateResponse.getHits().getHits().length == 0) { + break; + } + writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); + } finally { + stateResponse.decRef(); } - writeStateToStream(stateResponse.getHits().getAt(0).getSourceRef(), restoreStream); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessor.java index cb911c56ece40..56b0483e07c78 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessor.java @@ -228,8 +228,12 @@ private String getConcreteIndexOrWriteAlias(String documentId) { () -> true, retryMessage -> LOGGER.debug("[{}] {} {}", jobId, documentId, retryMessage) ); - return searchResponse.getHits().getHits().length > 0 - ? searchResponse.getHits().getHits()[0].getIndex() - : AnomalyDetectorsIndex.jobStateIndexWriteAlias(); + try { + return searchResponse.getHits().getHits().length > 0 + ? 
searchResponse.getHits().getHits()[0].getIndex() + : AnomalyDetectorsIndex.jobStateIndexWriteAlias(); + } finally { + searchResponse.decRef(); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 0223898444115..c1e600aa66ba5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -85,8 +85,12 @@ public Deque next() { SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId).scroll(CONTEXT_ALIVE_DURATION); searchResponse = client.searchScroll(searchScrollRequest).actionGet(); } - scrollId = searchResponse.getScrollId(); - return mapHits(searchResponse); + try { + scrollId = searchResponse.getScrollId(); + return mapHits(searchResponse); + } finally { + searchResponse.decRef(); + } } private SearchResponse initScroll() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index e87fbf48ca421..5630f16e63351 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -294,7 +294,10 @@ public SearchResponse searchWithRetry( client, () -> (isShutdown == false) && shouldRetry.get(), retryMsgHandler, - removeListener + removeListener.delegateFailure((l, r) -> { + r.mustIncRef(); + l.onResponse(r); + }) ); onGoingRetryableSearchActions.put(key, mlRetryableAction); mlRetryableAction.run(); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index 0d892209c8eda..f63f6e0549179 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -108,10 +108,14 @@ public Deque next() { } SearchResponse searchResponse = doSearch(searchAfterFields()); - if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + try { + if (trackTotalHits && totalHits.get() == 0) { + totalHits.set(searchResponse.getHits().getTotalHits().value); + } + return mapHits(searchResponse); + } finally { + searchResponse.decRef(); } - return mapHits(searchResponse); } private SearchResponse doSearch(Object[] searchAfterValues) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 6dbcef08481d1..9d6186e9c1c48 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -88,36 +88,40 @@ SearchInput.Result doExecute(WatchExecutionContext ctx, WatcherSearchTemplateReq client, () -> client.search(searchRequest).actionGet(timeout) ); + try { - if (logger.isDebugEnabled()) { - logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); - } + if (logger.isDebugEnabled()) { + logger.debug("[{}] found [{}] hits", ctx.id(), response.getHits().getTotalHits().value); + } - final Payload payload; - final Params params; - 
if (request.isRestTotalHitsAsint()) { - params = new MapParams(Collections.singletonMap("rest_total_hits_as_int", "true")); - } else { - params = EMPTY_PARAMS; - } - if (input.getExtractKeys() != null) { - BytesReference bytes = XContentHelper.toXContent(response, XContentType.SMILE, params, false); - // EMPTY is safe here because we never use namedObject - try ( - XContentParser parser = XContentHelper.createParser( - NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, - bytes, - XContentType.SMILE - ) - ) { - Map filteredKeys = XContentFilterKeysUtils.filterMapOrdered(input.getExtractKeys(), parser); - payload = new Payload.Simple(filteredKeys); + final Payload payload; + final Params params; + if (request.isRestTotalHitsAsint()) { + params = new MapParams(Collections.singletonMap("rest_total_hits_as_int", "true")); + } else { + params = EMPTY_PARAMS; + } + if (input.getExtractKeys() != null) { + BytesReference bytes = XContentHelper.toXContent(response, XContentType.SMILE, params, false); + // EMPTY is safe here because we never use namedObject + try ( + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytes, + XContentType.SMILE + ) + ) { + Map filteredKeys = XContentFilterKeysUtils.filterMapOrdered(input.getExtractKeys(), parser); + payload = new Payload.Simple(filteredKeys); + } + } else { + payload = new Payload.XContent(response, params); } - } else { - payload = new Payload.XContent(response, params); - } - return new SearchInput.Result(request, payload); + return new SearchInput.Result(request, payload); + } finally { + response.decRef(); + } } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/ExecutableSearchTransform.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/ExecutableSearchTransform.java index 2498fb6b4e50b..2a67d48c98f4e 100644 --- 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/ExecutableSearchTransform.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transform/search/ExecutableSearchTransform.java @@ -62,13 +62,17 @@ public SearchTransform.Result execute(WatchExecutionContext ctx, Payload payload client, () -> client.search(searchRequest).actionGet(timeout) ); - final Params params; - if (request.isRestTotalHitsAsint()) { - params = new MapParams(Collections.singletonMap("rest_total_hits_as_int", "true")); - } else { - params = EMPTY_PARAMS; + try { + final Params params; + if (request.isRestTotalHitsAsint()) { + params = new MapParams(Collections.singletonMap("rest_total_hits_as_int", "true")); + } else { + params = EMPTY_PARAMS; + } + return new SearchTransform.Result(request, new Payload.XContent(resp, params)); + } finally { + resp.decRef(); } - return new SearchTransform.Result(request, new Payload.XContent(resp, params)); } catch (Exception e) { logger.error(() -> format("failed to execute [%s] transform for [%s]", TYPE, ctx.id()), e); return new SearchTransform.Result(request, e); From d6e8217b0090730a91eadd09f736bce0b93d14c4 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 6 Dec 2023 14:33:05 -0500 Subject: [PATCH 263/263] Ensure dynamicMapping updates are handled in insertion order (#103047) The switch to holding dynamic fields in a hashmap effectively randomizes their iteration order. This can be troublesome when building the mapping update required by these updates. When iterating in an unknown order, recursing to the leaf mapper can occur many times `O(n^2)`. However, starting with insertion order, it will occur only `O(n)` times. 
closes: https://github.com/elastic/elasticsearch/issues/103011 --- docs/changelog/103047.yaml | 5 +++++ .../elasticsearch/index/mapper/DocumentParserContext.java | 6 +++--- 2 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/103047.yaml diff --git a/docs/changelog/103047.yaml b/docs/changelog/103047.yaml new file mode 100644 index 0000000000000..59f86d679b55f --- /dev/null +++ b/docs/changelog/103047.yaml @@ -0,0 +1,5 @@ +pr: 103047 +summary: Ensure `dynamicMapping` updates are handled in insertion order +area: Mapping +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index f47b392115f81..9d5cb374a9a89 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -166,9 +166,9 @@ protected DocumentParserContext( mappingParserContext, source, new HashSet<>(), - new HashMap<>(), + new LinkedHashMap<>(), new HashSet<>(), - new HashMap<>(), + new LinkedHashMap<>(), new ArrayList<>(), null, null,