From bdce88a190efe7c3586119a476458896bd87ffd6 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 14 Oct 2024 09:34:35 +0100 Subject: [PATCH 01/19] [ML] Feature flag default configs (#114660) --- .../org/elasticsearch/xpack/inference/InferencePlugin.java | 6 ++++-- .../rest-api-spec/test/inference/inference_crud.yml | 4 ++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index d251120980e0b..d361ce0837b93 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -217,8 +217,10 @@ public Collection createComponents(PluginServices services) { // reference correctly var registry = new InferenceServiceRegistry(inferenceServices, factoryContext); registry.init(services.client()); - for (var service : registry.getServices().values()) { - service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + if (DefaultElserFeatureFlag.isEnabled()) { + for (var service : registry.getServices().values()) { + service.defaultConfigs().forEach(modelRegistry::addDefaultConfiguration); + } } inferenceServiceRegistry.set(registry); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml index 11be68cc764e2..b1f640a40b34e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml @@ -41,6 +41,10 @@ --- "Test get all": + - requires: + cluster_features: "semantic_text.default_elser_2" + reason: semantic_text default ELSER 2 inference ID introduced in 8.16.0 + 
- do: inference.get: inference_id: "*" From 1d037811fd9e0d407b34bf96635cf118a5f11d46 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 14 Oct 2024 10:47:24 +0200 Subject: [PATCH 02/19] Renovate Bot PRs should run ci checks (#114699) --- .buildkite/pull-requests.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index 235a4b2dbb4ad..ea4f34bcbe11e 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -8,6 +8,7 @@ "admin", "write" ], + "allowed_list": ["elastic-renovate-prod[bot]"], "set_commit_status": false, "build_on_commit": true, "build_on_comment": true, From 08bad488cfabeb5c158eda8f385da9563792fb40 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 14 Oct 2024 10:56:30 +0200 Subject: [PATCH 03/19] Simplify NodeShutdownShardsIT (#114583) We no longer need to manually reroute after registering node shutdown in test since https://github.com/elastic/elasticsearch/pull/103251 --- .../xpack/shutdown/NodeShutdownShardsIT.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index d12d093dd5b8d..0e162238e96c8 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -327,11 +327,8 @@ public void testAutoExpandDuringRestart() throws Exception { ensureGreen("myindex"); putNodeShutdown(primaryNodeId, SingleNodeShutdownMetadata.Type.RESTART, null); - // registering node shutdown entry does not perform reroute, neither should it. - // we provoke it here in the test to ensure that auto-expansion has run. 
- updateIndexSettings(Settings.builder().put("index.routing.allocation.exclude.name", "non-existent"), "myindex"); - assertBusy(() -> assertIndexSetting("myindex", "index.number_of_replicas", "1")); + assertIndexSetting("myindex", "index.number_of_replicas", "1"); indexRandomData("myindex"); internalCluster().restartNode(primaryNode, new InternalTestCluster.RestartCallback() { @@ -361,9 +358,6 @@ public void testAutoExpandDuringReplace() throws Exception { var replacementNodeName = "node_t2"; putNodeShutdown(nodeIdToReplace, SingleNodeShutdownMetadata.Type.REPLACE, replacementNodeName); - // registering node shutdown entry does not perform reroute, neither should it. - // we provoke it here in the test to ensure that auto-expansion has run. - updateIndexSettings(Settings.builder().put("index.routing.allocation.exclude.name", "non-existent"), "index"); ensureGreen("index"); assertIndexSetting("index", "index.number_of_replicas", "1"); From 30ff4741c674532c832c53a3bd31aa307dce8a95 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Mon, 14 Oct 2024 11:05:35 +0200 Subject: [PATCH 04/19] Add generated code changes for HypotEvaluator (#114697) --- .../function/scalar/math/HypotEvaluator.java | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java index f5684bcb4be18..22094f7e623e6 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/HypotEvaluator.java @@ -13,16 +13,16 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import 
org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.Warnings; /** * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Hypot}. * This class is generated. Do not edit it. */ public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { - private final Warnings warnings; + private final Source source; private final EvalOperator.ExpressionEvaluator n1; @@ -30,12 +30,14 @@ public final class HypotEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext driverContext; + private Warnings warnings; + public HypotEvaluator(Source source, EvalOperator.ExpressionEvaluator n1, EvalOperator.ExpressionEvaluator n2, DriverContext driverContext) { + this.source = source; this.n1 = n1; this.n2 = n2; this.driverContext = driverContext; - this.warnings = Warnings.createWarnings(driverContext.warningsMode(), source); } @Override @@ -64,7 +66,7 @@ public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Bl } if (n1Block.getValueCount(p) != 1) { if (n1Block.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); } result.appendNull(); continue position; @@ -75,7 +77,7 @@ public DoubleBlock eval(int positionCount, DoubleBlock n1Block, DoubleBlock n2Bl } if (n2Block.getValueCount(p) != 1) { if (n2Block.getValueCount(p) > 1) { - warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); } result.appendNull(); continue position; @@ -105,6 +107,18 @@ public void close() 
{ Releasables.closeExpectNoException(n1, n2); } + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { private final Source source; From 2af19d87b08565b2b3960bee8b7e797b4ef27190 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 14 Oct 2024 12:24:40 +0300 Subject: [PATCH 05/19] ES|QL: Restrict sorting for _source and counter field types (#114638) --- docs/changelog/114638.yaml | 7 ++++++ .../xpack/esql/core/type/DataType.java | 8 +++++++ .../src/main/resources/tsdb-mapping.json | 4 ++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +++++- .../xpack/esql/analysis/Verifier.java | 8 +++---- .../expression/function/aggregate/Rate.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 2 +- .../xpack/esql/analysis/VerifierTests.java | 23 +++++++++++++++++++ .../rest-api-spec/test/esql/140_metadata.yml | 16 +++++++++++++ .../rest-api-spec/test/esql/40_tsdb.yml | 12 ++++++++-- 10 files changed, 80 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/114638.yaml diff --git a/docs/changelog/114638.yaml b/docs/changelog/114638.yaml new file mode 100644 index 0000000000000..0386aacfe3e18 --- /dev/null +++ b/docs/changelog/114638.yaml @@ -0,0 +1,7 @@ +pr: 114638 +summary: "ES|QL: Restrict sorting for `_source` and counter field types" +area: ES|QL +type: bug +issues: + - 114423 + - 111976 diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index c0092caeb9d5d..b23703c6d8b66 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ 
b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -425,6 +425,10 @@ public static boolean isRepresentable(DataType t) { && t.isCounter() == false; } + public static boolean isCounter(DataType t) { + return t == COUNTER_DOUBLE || t == COUNTER_INTEGER || t == COUNTER_LONG; + } + public static boolean isSpatialPoint(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT; } @@ -437,6 +441,10 @@ public static boolean isSpatial(DataType t) { return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; } + public static boolean isSortable(DataType t) { + return false == (t == SOURCE || isCounter(t) || isSpatial(t)); + } + public String nameUpper() { return name; } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json index dd4073d5dc7cf..39b1b10edd916 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/tsdb-mapping.json @@ -27,6 +27,10 @@ "message_in": { "type": "float", "time_series_metric": "counter" + }, + "message_out": { + "type": "integer", + "time_series_metric": "counter" } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 2e979dcce1758..1d6d81077b9be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -365,7 +365,12 @@ public enum Cap { /** * Support named parameters for field names. */ - NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES; + NAMED_PARAMETER_FOR_FIELD_AND_FUNCTION_NAMES, + + /** + * Fix sorting not allowed on _source and counters. 
+ */ + SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN; private final boolean snapshotOnly; private final FeatureFlag featureFlag; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index e45db0c02be7e..dd2b72b4d35d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -190,7 +190,7 @@ else if (p instanceof Lookup lookup) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); - checkForSortOnSpatialTypes(p, failures); + checkForSortableDataTypes(p, failures); checkFilterMatchConditions(p, failures); checkFullTextQueryFunctions(p, failures); @@ -555,12 +555,12 @@ private static Failure validateUnsignedLongNegation(Neg neg) { } /** - * Makes sure that spatial types do not appear in sorting contexts. + * Some datatypes are not sortable */ - private static void checkForSortOnSpatialTypes(LogicalPlan p, Set localFailures) { + private static void checkForSortableDataTypes(LogicalPlan p, Set localFailures) { if (p instanceof OrderBy ob) { ob.order().forEach(order -> { - if (DataType.isSpatial(order.dataType())) { + if (DataType.isSortable(order.dataType()) == false) { localFailures.add(fail(order, "cannot sort on " + order.dataType().typeName())); } }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java index f5597b7d64e81..135264c448f10 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Rate.java @@ -115,7 +115,7 @@ public DataType dataType() { protected 
TypeResolution resolveType() { TypeResolution resolution = isType( field(), - dt -> dt == DataType.COUNTER_LONG || dt == DataType.COUNTER_INTEGER || dt == DataType.COUNTER_DOUBLE, + dt -> DataType.isCounter(dt), sourceText(), FIRST, "counter_long", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 5d75549893512..6644f9b17055e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -1638,7 +1638,7 @@ public void testCounterTypes() { var attributes = limit.output().stream().collect(Collectors.toMap(NamedExpression::name, a -> a)); assertThat( attributes.keySet(), - equalTo(Set.of("network.connections", "network.bytes_in", "network.bytes_out", "network.message_in")) + equalTo(Set.of("network.connections", "network.bytes_in", "network.bytes_out", "network.message_in", "network.message_out")) ); assertThat(attributes.get("network.connections").dataType(), equalTo(DataType.LONG)); assertThat(attributes.get("network.bytes_in").dataType(), equalTo(DataType.COUNTER_LONG)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 612f2870fe8bc..01c020b16ecad 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -26,12 +26,16 @@ import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.esql.EsqlTestUtils.paramAsConstant; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_DOUBLE; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_INTEGER; +import static org.elasticsearch.xpack.esql.core.type.DataType.COUNTER_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -909,6 +913,25 @@ public void testSpatialSort() { assertEquals("1:42: cannot sort on cartesian_shape", error("FROM countries_bbox_web | LIMIT 5 | sort shape", countriesBboxWeb)); } + public void testSourceSorting() { + assertEquals("1:35: cannot sort on _source", error("from test metadata _source | sort _source")); + } + + public void testCountersSorting() { + Map counterDataTypes = Map.of( + COUNTER_DOUBLE, + "network.message_in", + COUNTER_INTEGER, + "network.message_out", + COUNTER_LONG, + "network.bytes_out" + ); + for (DataType counterDT : counterDataTypes.keySet()) { + var fieldName = counterDataTypes.get(counterDT); + assertEquals("1:18: cannot sort on " + counterDT.name().toLowerCase(Locale.ROOT), error("from test | sort " + fieldName, tsdb)); + } + } + public void testInlineImpossibleConvert() { assertEquals("1:5: argument of [false::ip] must be [ip or string], found value [false] type [boolean]", error("ROW false::ip")); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 33c9cc7558672..83234901ae8f2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -155,3 +155,19 @@ setup: esql.query: body: query: 'FROM test [metadata _source] | STATS 
COUNT_DISTINCT(_source)' + +--- +"sort on _source not allowed": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sorting_on_source_and_counters_forbidden] + reason: "Sorting on _source shouldn't have been possible" + - do: + catch: /cannot sort on _source/ + esql.query: + body: + query: 'FROM test metadata _source | sort _source' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml index 642407ac6d45b..ebf464ba667db 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_tsdb.yml @@ -178,11 +178,19 @@ cast counter then filter: --- sort on counter without cast: + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sorting_on_source_and_counters_forbidden] + reason: "Sorting on counters shouldn't have been possible" - do: - catch: bad_request + catch: /cannot sort on counter_long/ esql.query: body: - query: 'from test | KEEP k8s.pod.network.tx | sort @k8s.pod.network.tx | limit 1' + query: 'from test | KEEP k8s.pod.network.tx | sort k8s.pod.network.tx | limit 1' --- cast then sort on counter: From 2f09fb66e95b327048156d2ef970ebd6c0e2fe23 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Mon, 14 Oct 2024 11:59:44 +0200 Subject: [PATCH 06/19] Preserve thread context when waiting for segment generation in RTG (#114623) Closes ES-9778 --- docs/changelog/114623.yaml | 5 +++++ .../org/elasticsearch/action/get/TransportGetAction.java | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/114623.yaml diff --git a/docs/changelog/114623.yaml b/docs/changelog/114623.yaml new file mode 100644 index 0000000000000..817a8e874bcc0 --- /dev/null +++ 
b/docs/changelog/114623.yaml @@ -0,0 +1,5 @@ +pr: 114623 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 99eac250641ae..fb4b3907d2bfd 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -284,11 +285,11 @@ private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, Disco } else { assert r.segmentGeneration() > -1L; assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), - listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) + final ActionListener termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext( + listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)), + threadPool.getThreadContext() ); + indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener); } } }), TransportGetFromTranslogAction.Response::new, getExecutor(request, shardId)) From 4ab2e6157db6ea02c2abd824ce4c9c03b560cd88 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 14 Oct 2024 12:01:53 
+0200 Subject: [PATCH 07/19] Fix failing tests after PR clash (#114625) Two PRs conflicted without github or CI noticing. The first added these tests, and the second modified their behaviour. Both went green in CI and both were merged within an hour of each other. * PR that added the tests: * https://github.com/elastic/elasticsearch/pull/112938 * merged 14:13CET * PR that changed the behaviour of these tests: * https://github.com/elastic/elasticsearch/pull/114411 * merged 14:48CET --- muted-tests.yml | 6 -- .../optimizer/PhysicalPlanOptimizerTests.java | 58 +++++++++---------- 2 files changed, 28 insertions(+), 36 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index d0fc50de31bd1..975eb0c434054 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -363,12 +363,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=synonyms/60_synonym_rule_get/Synonym rule not found} issue: https://github.com/elastic/elasticsearch/issues/114444 -- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests - method: testPushSpatialIntersectsEvalToSource {default} - issue: https://github.com/elastic/elasticsearch/issues/114627 -- class: org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizerTests - method: testPushWhereEvalToSource {default} - issue: https://github.com/elastic/elasticsearch/issues/114628 - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 6746b8ff61268..114aed68761fe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -3211,28 +3211,28 @@ public void testPushSpatialIntersectsStringToSource() { /** * Plan: * - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[],false] - * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ + * EvalExec[[scalerank{f}#8 AS rank]] + * \_LimitExec[1000[INTEGER]] + * \_ExchangeExec[[],false] + * \_FragmentExec[filter=null, estimatedRowSize=0, reducer=[], fragment=[ * Limit[1000[INTEGER]] - * \_Filter[rank{r}#4 lt 4[INTEGER]] - * \_Eval[[scalerank{f}#8 AS rank]] - * \_EsRelation[airports][abbrev{f}#6, city{f}#12, city_location{f}#13, count..]]] + * \_Filter[scalerank{f}#8 < 4[INTEGER]] + * \_EsRelation[airports][abbrev{f}#6, city{f}#12, city_location{f}#13, count..]]] * * Optimized: * - * LimitExec[1000[INTEGER]] - * \_ExchangeExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, - * type{f}#9, rank{r}#4],false] - * \_ProjectExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, - * type{f}#9, rank{r}#4]] - * \_FieldExtractExec[abbrev{f}#6, city{f}#12, city_location{f}#13, count..][] - * \_LimitExec[1000[INTEGER]] - * \_EvalExec[[scalerank{f}#8 AS rank]] - * \_FieldExtractExec[scalerank{f}#8][] - * \_EsQueryExec[airports], indexMode[standard], query[{" - * esql_single_value":{"field":"scalerank","next":{"range":{"scalerank":{"lt":4,"boost":1.0}}},"source":"rank < 4@3:9"} - * }][_doc{f}#23], limit[], sort[] estimatedRowSize[304] + * EvalExec[[scalerank{f}#8 AS rank]] + * \_LimitExec[1000[INTEGER]] + * \_ExchangeExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, + * type{f}#9],false + * ] + * \_ProjectExec[[abbrev{f}#6, city{f}#12, city_location{f}#13, country{f}#11, location{f}#10, name{f}#7, scalerank{f}#8, + * type{f}#9] + * ] + * \_FieldExtractExec[abbrev{f}#6, 
city{f}#12, city_location{f}#13, count..][] + * \_EsQueryExec[airports], indexMode[standard], query[{ + * "esql_single_value":{"field":"scalerank","next":{"range":{"scalerank":{"lt":4,"boost":1.0}}},"source":"rank < 4@3:9"} + * ][_doc{f}#23], limit[1000], sort[] estimatedRowSize[304] * */ public void testPushWhereEvalToSource() { @@ -3243,7 +3243,8 @@ public void testPushWhereEvalToSource() { """; var plan = this.physicalPlan(query, airports); - var limit = as(plan, LimitExec.class); + var eval = as(plan, EvalExec.class); + var limit = as(eval.child(), LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); var fragment = as(exchange.child(), FragmentExec.class); var limit2 = as(fragment.fragment(), Limit.class); @@ -3251,16 +3252,14 @@ public void testPushWhereEvalToSource() { assertThat("filter contains LessThan", filter.condition(), instanceOf(LessThan.class)); var optimized = optimizedPlan(plan); - var topLimit = as(optimized, LimitExec.class); + eval = as(optimized, EvalExec.class); + var topLimit = as(eval.child(), LimitExec.class); exchange = as(topLimit.child(), ExchangeExec.class); var project = as(exchange.child(), ProjectExec.class); var fieldExtract = as(project.child(), FieldExtractExec.class); assertThat(fieldExtract.attributesToExtract().size(), greaterThan(5)); - limit = as(fieldExtract.child(), LimitExec.class); - var eval = as(limit.child(), EvalExec.class); - fieldExtract = as(eval.child(), FieldExtractExec.class); - assertThat(fieldExtract.attributesToExtract().stream().map(Attribute::name).collect(Collectors.toList()), contains("scalerank")); var source = source(fieldExtract.child()); + assertThat(source.limit(), is(topLimit.limit())); var condition = as(source.query(), SingleValueQuery.Builder.class); assertThat("Expected predicate to be passed to Lucene query", condition.source().text(), equalTo("rank < 4")); assertThat("Expected field to be passed to Lucene query", condition.field(), equalTo("scalerank")); @@ -3281,7 +3280,8 
@@ public void testPushSpatialIntersectsEvalToSource() { """ }) { var plan = this.physicalPlan(query, airports); - var limit = as(plan, LimitExec.class); + var eval = as(plan, EvalExec.class); + var limit = as(eval.child(), LimitExec.class); var exchange = as(limit.child(), ExchangeExec.class); var fragment = as(exchange.child(), FragmentExec.class); var limit2 = as(fragment.fragment(), Limit.class); @@ -3289,16 +3289,14 @@ public void testPushSpatialIntersectsEvalToSource() { assertThat("filter contains ST_INTERSECTS", filter.condition(), instanceOf(SpatialIntersects.class)); var optimized = optimizedPlan(plan); - var topLimit = as(optimized, LimitExec.class); + eval = as(optimized, EvalExec.class); + var topLimit = as(eval.child(), LimitExec.class); exchange = as(topLimit.child(), ExchangeExec.class); var project = as(exchange.child(), ProjectExec.class); var fieldExtract = as(project.child(), FieldExtractExec.class); assertThat(fieldExtract.attributesToExtract().size(), greaterThan(5)); - limit = as(fieldExtract.child(), LimitExec.class); - var eval = as(limit.child(), EvalExec.class); - fieldExtract = as(eval.child(), FieldExtractExec.class); - assertThat(fieldExtract.attributesToExtract().stream().map(Attribute::name).collect(Collectors.toList()), contains("location")); var source = source(fieldExtract.child()); + assertThat(source.limit(), is(topLimit.limit())); var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); assertThat("Geometry field name", condition.fieldName(), equalTo("location")); assertThat("Spatial relationship", condition.relation(), equalTo(ShapeRelation.INTERSECTS)); From 98e0a4e953d339402c87ed426a70e6cc8320c17f Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 14 Oct 2024 13:03:02 +0300 Subject: [PATCH 08/19] Guard second doc parsing pass with index setting (#114649) * Guard second doc parsing pass with index setting * add test * updates * updates * merge --- 
.../21_synthetic_source_stored.yml | 49 +++++++++++++++++++ .../common/settings/IndexScopedSettings.java | 1 + .../elasticsearch/index/IndexSettings.java | 21 ++++++++ .../index/mapper/DocumentParserContext.java | 7 ++- .../TransportResumeFollowActionTests.java | 1 + 5 files changed, 78 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml index dfe6c9820a16a..eab51427876aa 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/21_synthetic_source_stored.yml @@ -411,6 +411,55 @@ index param - nested array within array: - match: { hits.hits.0._source.path.to.some.3.id: [ 1000, 2000 ] } +--- +index param - nested array within array - disabled second pass: + - requires: + cluster_features: ["mapper.synthetic_source_keep", "mapper.bwc_workaround_9_0"] + reason: requires tracking ignored source + + - do: + indices.create: + index: test + body: + settings: + index: + synthetic_source: + enable_second_doc_parsing_pass: false + mappings: + _source: + mode: synthetic + properties: + name: + type: keyword + path: + properties: + to: + properties: + some: + synthetic_source_keep: arrays + properties: + id: + type: integer + + - do: + bulk: + index: test + refresh: true + body: + - '{ "create": { } }' + - '{ "name": "A", "path": [ { "to": [ { "some" : [ { "id": 10 }, { "id": [1, 3, 2] } ] }, { "some": { "id": 100 } } ] }, { "to": { "some": { "id": [1000, 2000] } } } ] }' + - match: { errors: false } + + - do: + search: + index: test + sort: name + - match: { hits.hits.0._source.name: A } + - length: { hits.hits.0._source.path.to.some: 2} + - match: { hits.hits.0._source.path.to.some.0.id: 10 } + - match: { 
hits.hits.0._source.path.to.some.1.id: [ 1, 3, 2] } + + --- # 112156 stored field under object with store_array_source: diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 884ce38fba391..f5276bbe49b63 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -187,6 +187,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { FieldMapper.SYNTHETIC_SOURCE_KEEP_INDEX_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING, IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, + IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING, SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING, // validate that built-in similarities don't get redefined diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index f3f8ce4b8e7e4..347b44a22e7c0 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -652,6 +652,13 @@ public Iterator> settings() { Property.Final ); + public static final Setting SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING = Setting.boolSetting( + "index.synthetic_source.enable_second_doc_parsing_pass", + true, + Property.IndexScope, + Property.Dynamic + ); + /** * Returns true if TSDB encoding is enabled. 
The default is true */ @@ -821,6 +828,7 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { private volatile long mappingDimensionFieldsLimit; private volatile boolean skipIgnoredSourceWrite; private volatile boolean skipIgnoredSourceRead; + private volatile boolean syntheticSourceSecondDocParsingPassEnabled; private final SourceFieldMapper.Mode indexMappingSourceMode; /** @@ -982,6 +990,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti es87TSDBCodecEnabled = scopedSettings.get(TIME_SERIES_ES87TSDB_CODEC_ENABLED_SETTING); skipIgnoredSourceWrite = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); skipIgnoredSourceRead = scopedSettings.get(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); + syntheticSourceSecondDocParsingPassEnabled = scopedSettings.get(SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); indexMappingSourceMode = scopedSettings.get(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); scopedSettings.addSettingsUpdateConsumer( @@ -1070,6 +1079,10 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti this::setSkipIgnoredSourceWrite ); scopedSettings.addSettingsUpdateConsumer(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING, this::setSkipIgnoredSourceRead); + scopedSettings.addSettingsUpdateConsumer( + SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING, + this::setSyntheticSourceSecondDocParsingPassEnabled + ); } private void setSearchIdleAfter(TimeValue searchIdleAfter) { @@ -1662,6 +1675,14 @@ private void setSkipIgnoredSourceRead(boolean value) { this.skipIgnoredSourceRead = value; } + private void setSyntheticSourceSecondDocParsingPassEnabled(boolean syntheticSourceSecondDocParsingPassEnabled) { + this.syntheticSourceSecondDocParsingPassEnabled = syntheticSourceSecondDocParsingPassEnabled; + } + + public boolean isSyntheticSourceSecondDocParsingPassEnabled() { + return syntheticSourceSecondDocParsingPassEnabled; + } + 
public SourceFieldMapper.Mode getIndexMappingSourceMode() { return indexMappingSourceMode; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index ac236e5a7e5fd..2eec14bd1a8d6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -111,6 +111,7 @@ public int get() { private final Set ignoredFields; private final List ignoredFieldValues; private final List ignoredFieldsMissingValues; + private final boolean inArrayScopeEnabled; private boolean inArrayScope; private final Map> dynamicMappers; @@ -143,6 +144,7 @@ private DocumentParserContext( Set ignoreFields, List ignoredFieldValues, List ignoredFieldsWithNoSource, + boolean inArrayScopeEnabled, boolean inArrayScope, Map> dynamicMappers, Map dynamicObjectMappers, @@ -164,6 +166,7 @@ private DocumentParserContext( this.ignoredFields = ignoreFields; this.ignoredFieldValues = ignoredFieldValues; this.ignoredFieldsMissingValues = ignoredFieldsWithNoSource; + this.inArrayScopeEnabled = inArrayScopeEnabled; this.inArrayScope = inArrayScope; this.dynamicMappers = dynamicMappers; this.dynamicObjectMappers = dynamicObjectMappers; @@ -188,6 +191,7 @@ private DocumentParserContext(ObjectMapper parent, ObjectMapper.Dynamic dynamic, in.ignoredFields, in.ignoredFieldValues, in.ignoredFieldsMissingValues, + in.inArrayScopeEnabled, in.inArrayScope, in.dynamicMappers, in.dynamicObjectMappers, @@ -219,6 +223,7 @@ protected DocumentParserContext( new HashSet<>(), new ArrayList<>(), new ArrayList<>(), + mappingParserContext.getIndexSettings().isSyntheticSourceSecondDocParsingPassEnabled(), false, new HashMap<>(), new HashMap<>(), @@ -371,7 +376,7 @@ public final Collection getIgnoredFieldsMiss * Applies to synthetic source only. 
*/ public final DocumentParserContext maybeCloneForArray(Mapper mapper) throws IOException { - if (canAddIgnoredField() && mapper instanceof ObjectMapper) { + if (canAddIgnoredField() && mapper instanceof ObjectMapper && inArrayScopeEnabled) { boolean isNested = mapper instanceof NestedObjectMapper; if ((inArrayScope == false && isNested == false) || (inArrayScope && isNested)) { DocumentParserContext subcontext = switchParser(parser()); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java index 357e1bca38e8f..ef03fd0ba6f0e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowActionTests.java @@ -333,6 +333,7 @@ public void testDynamicIndexSettingsAreClassified() { replicatedSettings.add(IndexSettings.MAX_SHINGLE_DIFF_SETTING); replicatedSettings.add(IndexSettings.TIME_SERIES_END_TIME); replicatedSettings.add(IndexSettings.PREFER_ILM_SETTING); + replicatedSettings.add(IndexSettings.SYNTHETIC_SOURCE_SECOND_DOC_PARSING_PASS_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_READ_SETTING); replicatedSettings.add(IgnoredSourceFieldMapper.SKIP_IGNORED_SOURCE_WRITE_SETTING); replicatedSettings.add(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING); From c4118c639f11ae111cea6992376e1b046883d15f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20FOUCRET?= Date: Mon, 14 Oct 2024 12:50:00 +0200 Subject: [PATCH 09/19] Fix termStats posting usage (#114644) --- .../elasticsearch/script/ScriptTermStats.java | 59 +++++++++---------- .../script/ScriptTermStatsTests.java | 18 +++--- 2 files changed, 36 insertions(+), 41 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java 
b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java index 9dde32cc75e6a..b27019765e33b 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptTermStats.java @@ -12,9 +12,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermStates; -import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.util.CachedSupplier; import org.elasticsearch.features.NodeFeature; @@ -71,17 +70,15 @@ public int uniqueTermsCount() { public int matchedTermsCount() { final int docId = docIdSupplier.getAsInt(); int matchedTerms = 0; + advancePostings(docId); - try { - for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum != null && postingsEnum.advance(docId) == docId && postingsEnum.freq() > 0) { - matchedTerms++; - } + for (PostingsEnum postingsEnum : postingsSupplier.get()) { + if (postingsEnum != null && postingsEnum.docID() == docId) { + matchedTerms++; } - return matchedTerms; - } catch (IOException e) { - throw new UncheckedIOException(e); } + + return matchedTerms; } /** @@ -150,8 +147,9 @@ public StatsSummary termFreq() { final int docId = docIdSupplier.getAsInt(); try { + advancePostings(docId); for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum == null || postingsEnum.advance(docId) != docId) { + if (postingsEnum == null || postingsEnum.docID() != docId) { statsSummary.accept(0); } else { statsSummary.accept(postingsEnum.freq()); @@ -170,12 +168,13 @@ public StatsSummary termFreq() { * @return statistics on termPositions for the terms of the query in the current dac */ public StatsSummary termPositions() { - try { - statsSummary.reset(); - int docId = 
docIdSupplier.getAsInt(); + statsSummary.reset(); + int docId = docIdSupplier.getAsInt(); + try { + advancePostings(docId); for (PostingsEnum postingsEnum : postingsSupplier.get()) { - if (postingsEnum == null || postingsEnum.advance(docId) != docId) { + if (postingsEnum == null || postingsEnum.docID() != docId) { continue; } for (int i = 0; i < postingsEnum.freq(); i++) { @@ -206,25 +205,9 @@ private TermStates[] loadTermContexts() { private PostingsEnum[] loadPostings() { try { PostingsEnum[] postings = new PostingsEnum[terms.length]; - TermStates[] contexts = termContextsSupplier.get(); for (int i = 0; i < terms.length; i++) { - TermStates termStates = contexts[i]; - if (termStates.docFreq() == 0) { - postings[i] = null; - continue; - } - - TermState state = termStates.get(leafReaderContext); - if (state == null) { - postings[i] = null; - continue; - } - - TermsEnum termsEnum = leafReaderContext.reader().terms(terms[i].field()).iterator(); - termsEnum.seekExact(terms[i].bytes(), state); - - postings[i] = termsEnum.postings(null, PostingsEnum.ALL); + postings[i] = leafReaderContext.reader().postings(terms[i], PostingsEnum.POSITIONS); } return postings; @@ -232,4 +215,16 @@ private PostingsEnum[] loadPostings() { throw new UncheckedIOException(e); } } + + private void advancePostings(int targetDocId) { + try { + for (PostingsEnum posting : postingsSupplier.get()) { + if (posting != null && posting.docID() < targetDocId && posting.docID() != DocIdSetIterator.NO_MORE_DOCS) { + posting.advance(targetDocId); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index b1b6a11764120..239c90bdee2fd 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -48,9 +48,9 @@ public 
void testMatchedTermsCount() throws IOException { // Partial match assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::matchedTermsCount, - Map.of("doc-1", equalTo(1), "doc-2", equalTo(1), "doc-3", equalTo(0)) + Map.of("doc-1", equalTo(2), "doc-2", equalTo(1), "doc-3", equalTo(0)) ); // Always returns 0 when no term is provided. @@ -211,12 +211,12 @@ public void testTermFreq() throws IOException { // With missing terms { assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::termFreq, Map.ofEntries( - Map.entry("doc-1", equalTo(new StatsSummary(2, 1, 0, 1))), - Map.entry("doc-2", equalTo(new StatsSummary(2, 2, 0, 2))), - Map.entry("doc-3", equalTo(new StatsSummary(2, 0, 0, 0))) + Map.entry("doc-1", equalTo(new StatsSummary(3, 2, 0, 1))), + Map.entry("doc-2", equalTo(new StatsSummary(3, 2, 0, 2))), + Map.entry("doc-3", equalTo(new StatsSummary(3, 0, 0, 0))) ) ); } @@ -274,10 +274,10 @@ public void testTermPositions() throws IOException { // With missing terms { assertAllDocs( - Set.of(new Term("field", "foo"), new Term("field", "baz")), + Set.of(new Term("field", "foo"), new Term("field", "qux"), new Term("field", "baz")), ScriptTermStats::termPositions, Map.ofEntries( - Map.entry("doc-1", equalTo(new StatsSummary(1, 1, 1, 1))), + Map.entry("doc-1", equalTo(new StatsSummary(2, 4, 1, 3))), Map.entry("doc-2", equalTo(new StatsSummary(2, 3, 1, 2))), Map.entry("doc-3", equalTo(new StatsSummary())) ) @@ -311,7 +311,7 @@ private void withIndexSearcher(CheckedConsumer consu Document doc = new Document(); doc.add(new TextField("id", "doc-1", Field.Store.YES)); - doc.add(new TextField("field", "foo bar", Field.Store.YES)); + doc.add(new TextField("field", "foo bar qux", Field.Store.YES)); w.addDocument(doc); doc = new 
Document(); From 8c31d80ae5f73df3d989ffbb4c29e73c44a526a2 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 14 Oct 2024 13:03:47 +0200 Subject: [PATCH 10/19] Node shutdown test integration test (#114582) This change adds a test case that verifies that the node can be shutdown while hosting an index with 0-1 or 0-all auto-expand configuration. --- .../xpack/shutdown/NodeShutdownShardsIT.java | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index 0e162238e96c8..ee7438dfca428 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -375,6 +375,32 @@ public void testAutoExpandDuringReplace() throws Exception { assertIndexSetting("index", "index.number_of_replicas", "1"); } + public void testAutoExpandDuringShutdown() throws Exception { + + var node1 = internalCluster().startNode(); + var node2 = internalCluster().startNode(); + + createIndex("index", indexSettings(1, 0).put("index.auto_expand_replicas", randomFrom("0-all", "0-1")).build()); + indexRandomData("index"); + + ensureGreen("index"); + assertIndexSetting("index", "index.number_of_replicas", "1"); + + var nodeNameToShutdown = randomFrom(node1, node2); + var nodeIdToShutdown = getNodeId(nodeNameToShutdown); + + putNodeShutdown(nodeIdToShutdown, SingleNodeShutdownMetadata.Type.REMOVE, null); + + ensureGreen("index"); + assertIndexSetting("index", "index.number_of_replicas", "0"); + + assertBusy(() -> assertNodeShutdownStatus(nodeIdToShutdown, COMPLETE)); + internalCluster().stopNode(nodeIdToShutdown); + + ensureGreen("index"); + assertIndexSetting("index", 
"index.number_of_replicas", "0"); + } + public void testNodeShutdownWithUnassignedShards() throws Exception { final String nodeA = internalCluster().startNode(); final String nodeAId = getNodeId(nodeA); From 7157c0a4c4a3638c2d264b42b234491d27ca7557 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 14:01:59 +0200 Subject: [PATCH 11/19] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 277ebb4 (main) (#114409) * Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 277ebb4 * Tweak renovate replace pattern --------- Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Rene Groeschke --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 9d78d3229edc1..d80256ee36a17 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -27,7 +27,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:90888b190da54062f67f3fef1372eb0ae7d81ea55f5a1f56d748b13e4853d984", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:277ebb42c458ef39cb4028f9204f0b3d51d8cd628ea737a65696a1143c3e42fe", "-wolfi", "apk" ), diff --git a/renovate.json b/renovate.json index 0a1d588e6332c..293a2bb262375 100644 --- a/renovate.json +++ b/renovate.json @@ -30,7 +30,7 @@ "\\s*\"?(?[^\\s:@\"]+)(?::(?[-a-zA-Z0-9.]+))?(?:@(?sha256:[a-zA-Z0-9]+))?\"?" 
], "currentValueTemplate": "{{#if currentValue}}{{{currentValue}}}{{else}}latest{{/if}}", - "autoReplaceStringTemplate": "\"{{{depName}}}{{#if newValue}}:{{{newValue}}}{{/if}}{{#if newDigest}}@{{{newDigest}}}{{/if}}\"", + "autoReplaceStringTemplate": "{{{depName}}}{{#if newValue}}:{{{newValue}}}{{/if}}{{#if newDigest}}@{{{newDigest}}}{{/if}}\"", "datasourceTemplate": "docker" } ] From af35cada9251eb81aff7563c8012cfb93103c18d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Mon, 14 Oct 2024 23:53:26 +1100 Subject: [PATCH 12/19] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/rest-api/usage/line_38} #113694 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 975eb0c434054..2e6fd10c6ef65 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -366,6 +366,9 @@ tests: - class: org.elasticsearch.xpack.inference.integration.ModelRegistryIT method: testGetModel issue: https://github.com/elastic/elasticsearch/issues/114657 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/rest-api/usage/line_38} + issue: https://github.com/elastic/elasticsearch/issues/113694 # Examples: # From 51ea024eda1336a300e2836d1659a42691880b00 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 14 Oct 2024 16:02:24 +0300 Subject: [PATCH 13/19] Introduce CRUD APIs for data stream options (#113945) In this PR we introduce two endpoint PUT and GET to manage the data stream options and consequently the failure store configuration on the data stream level. This means that we can manage the failure store of existing data streams. 
The APIs look like: ``` # Enable/disable PUT _data_stream/my-data-stream/_options { "failure_store": { "enabled": true } } # Remove existing configuration DELETE _data_stream/my-data-stream/_options # Retrieve GET _data_stream/my-data-stream/_options { "failure_store": { "enabled": true } } ``` Future work: - Document the new APIs - Convert `DataStreamOptionsIT.java` to a yaml test. --- .../datastreams/DataStreamOptionsIT.java | 144 +++++++++++ .../src/main/java/module-info.java | 1 + .../datastreams/DataStreamsPlugin.java | 20 ++ .../action/DeleteDataStreamOptionsAction.java | 108 +++++++++ .../action/GetDataStreamOptionsAction.java | 223 ++++++++++++++++++ .../action/PutDataStreamOptionsAction.java | 165 +++++++++++++ ...ransportDeleteDataStreamOptionsAction.java | 86 +++++++ .../TransportGetDataStreamOptionsAction.java | 104 ++++++++ .../TransportPutDataStreamOptionsAction.java | 92 ++++++++ .../RestDeleteDataStreamOptionsAction.java | 54 +++++ .../rest/RestGetDataStreamOptionsAction.java | 58 +++++ .../rest/RestPutDataStreamOptionsAction.java | 58 +++++ .../metadata/MetadataDataStreamsService.java | 94 ++++++++ .../metadata/DataStreamOptionsTests.java | 11 +- .../MetadataDataStreamsServiceTests.java | 33 +++ .../xpack/security/operator/Constants.java | 3 + 16 files changed, 1253 insertions(+), 1 deletion(-) create mode 100644 modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java create 
mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java create mode 100644 modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java new file mode 100644 index 0000000000000..980cc32a12c68 --- /dev/null +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamOptionsIT.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +/** + * This should be a yaml test, but in order to write one we would need to expose the new APIs in the rest-api-spec. + * We do not want to do that until the feature flag is removed. For this reason, we temporarily, test the new APIs here. + * Please convert this to a yaml test when the feature flag is removed. + */ +public class DataStreamOptionsIT extends DisabledSecurityDataStreamTestCase { + + private static final String DATA_STREAM_NAME = "failure-data-stream"; + + @SuppressWarnings("unchecked") + @Before + public void setup() throws IOException { + Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/ds-template"); + putComposableIndexTemplateRequest.setJsonEntity(""" + { + "index_patterns": ["failure-data-stream"], + "template": { + "settings": { + "number_of_replicas": 0 + } + }, + "data_stream": { + "failure_store": true + } + } + """); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + assertOK(client().performRequest(new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME))); + // Initialize the failure store. 
+ assertOK(client().performRequest(new Request("POST", DATA_STREAM_NAME + "/_rollover?target_failure_store"))); + ensureGreen(DATA_STREAM_NAME); + + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + List backingIndices = getIndices(dataStream); + assertThat(backingIndices.size(), is(1)); + List failureStore = getFailureStore(dataStream); + assertThat(failureStore.size(), is(1)); + } + + public void testEnableDisableFailureStore() throws IOException { + { + assertAcknowledged(client().performRequest(new Request("DELETE", "/_data_stream/" + DATA_STREAM_NAME + "/_options"))); + assertFailureStore(false, 1); + assertDataStreamOptions(null); + } + { + Request enableRequest = new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME + "/_options"); + enableRequest.setJsonEntity(""" + { + "failure_store": { + "enabled": true + } + }"""); + assertAcknowledged(client().performRequest(enableRequest)); + assertFailureStore(true, 1); + assertDataStreamOptions(true); + } + + { + Request disableRequest = new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME + "/_options"); + disableRequest.setJsonEntity(""" + { + "failure_store": { + "enabled": false + } + }"""); + assertAcknowledged(client().performRequest(disableRequest)); + assertFailureStore(false, 1); + assertDataStreamOptions(false); + } + } + + @SuppressWarnings("unchecked") + private void assertFailureStore(boolean failureStoreEnabled, int failureStoreSize) throws IOException { + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map 
dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + assertThat(dataStream.containsKey("failure_store"), is(true)); + // Ensure the failure store is set to the provided value + assertThat(((Map) dataStream.get("failure_store")).get("enabled"), equalTo(failureStoreEnabled)); + // And the failure indices preserved + List failureStore = getFailureStore(dataStream); + assertThat(failureStore.size(), is(failureStoreSize)); + } + + @SuppressWarnings("unchecked") + private void assertDataStreamOptions(Boolean failureStoreEnabled) throws IOException { + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME + "/_options")); + List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); + assertThat(dataStreams.size(), is(1)); + Map dataStream = (Map) dataStreams.get(0); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); + Map> options = (Map>) dataStream.get("options"); + if (failureStoreEnabled == null) { + assertThat(options, nullValue()); + } else { + assertThat(options.containsKey("failure_store"), is(true)); + assertThat(options.get("failure_store").get("enabled"), equalTo(failureStoreEnabled)); + } + } + + @SuppressWarnings("unchecked") + private List getFailureStore(Map response) { + var failureStore = (Map) response.get("failure_store"); + return getIndices(failureStore); + + } + + @SuppressWarnings("unchecked") + private List getIndices(Map response) { + List> indices = (List>) response.get("indices"); + return indices.stream().map(index -> index.get("index_name")).toList(); + } +} diff --git a/modules/data-streams/src/main/java/module-info.java b/modules/data-streams/src/main/java/module-info.java index 16229f9eb2394..2d49029c1023c 100644 --- a/modules/data-streams/src/main/java/module-info.java +++ b/modules/data-streams/src/main/java/module-info.java @@ -17,6 +17,7 @@ exports org.elasticsearch.datastreams.action to 
org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle.action to org.elasticsearch.server; exports org.elasticsearch.datastreams.lifecycle; + exports org.elasticsearch.datastreams.options.action to org.elasticsearch.server; provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.datastreams.DataStreamFeatures; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index 1a6465a251021..cb7445705537a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -56,6 +57,15 @@ import org.elasticsearch.datastreams.lifecycle.rest.RestExplainDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestGetDataStreamLifecycleAction; import org.elasticsearch.datastreams.lifecycle.rest.RestPutDataStreamLifecycleAction; +import org.elasticsearch.datastreams.options.action.DeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.GetDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.PutDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.TransportDeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.action.TransportGetDataStreamOptionsAction; +import 
org.elasticsearch.datastreams.options.action.TransportPutDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestDeleteDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestGetDataStreamOptionsAction; +import org.elasticsearch.datastreams.options.rest.RestPutDataStreamOptionsAction; import org.elasticsearch.datastreams.rest.RestCreateDataStreamAction; import org.elasticsearch.datastreams.rest.RestDataStreamsStatsAction; import org.elasticsearch.datastreams.rest.RestDeleteDataStreamAction; @@ -229,6 +239,11 @@ public Collection createComponents(PluginServices services) { actions.add(new ActionHandler<>(DeleteDataStreamLifecycleAction.INSTANCE, TransportDeleteDataStreamLifecycleAction.class)); actions.add(new ActionHandler<>(ExplainDataStreamLifecycleAction.INSTANCE, TransportExplainDataStreamLifecycleAction.class)); actions.add(new ActionHandler<>(GetDataStreamLifecycleStatsAction.INSTANCE, TransportGetDataStreamLifecycleStatsAction.class)); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + actions.add(new ActionHandler<>(GetDataStreamOptionsAction.INSTANCE, TransportGetDataStreamOptionsAction.class)); + actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); + actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); + } return actions; } @@ -261,6 +276,11 @@ public List getRestHandlers( handlers.add(new RestDeleteDataStreamLifecycleAction()); handlers.add(new RestExplainDataStreamLifecycleAction()); handlers.add(new RestDataStreamLifecycleStatsAction()); + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + handlers.add(new RestGetDataStreamOptionsAction()); + handlers.add(new RestPutDataStreamOptionsAction()); + handlers.add(new RestDeleteDataStreamOptionsAction()); + } return handlers; } diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..98a29dd636ddf --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/DeleteDataStreamOptionsAction.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +/** + * Removes the data stream options configuration from the requested data streams. 
+ */ +public class DeleteDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/delete"); + + private DeleteDataStreamOptionsAction() {/* no instances */} + + public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable { + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readOptionalStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalStringArray(names); + indicesOptions.writeIndicesOptions(out); + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + } + + public String[] getNames() { + return names; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) && Objects.equals(indicesOptions, 
request.indicesOptions); + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public IndicesRequest indices(String... indices) { + this.names = indices; + return this; + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java new file mode 100644 index 0000000000000..c1354da1129ca --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/GetDataStreamOptionsAction.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.cluster.metadata.DataStreamOptions; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * This action retrieves the data stream options from every data stream. Currently, data stream options only support + * failure store. 
+ */ +public class GetDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/get"); + + private GetDataStreamOptionsAction() {/* no instances */} + + public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + private boolean includeDefaults = false; + + public Request(TimeValue masterNodeTimeout, String[] names) { + super(masterNodeTimeout); + this.names = names; + } + + public Request(TimeValue masterNodeTimeout, String[] names, boolean includeDefaults) { + super(masterNodeTimeout); + this.names = names; + this.includeDefaults = includeDefaults; + } + + public String[] getNames() { + return names; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readOptionalStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + this.includeDefaults = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalStringArray(names); + indicesOptions.writeIndicesOptions(out); + out.writeBoolean(includeDefaults); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) + && indicesOptions.equals(request.indicesOptions) 
+ && includeDefaults == request.includeDefaults; + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions, includeDefaults); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public boolean includeDefaults() { + return includeDefaults; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public IndicesRequest indices(String... indices) { + this.names = indices; + return this; + } + + public Request includeDefaults(boolean includeDefaults) { + this.includeDefaults = includeDefaults; + return this; + } + } + + public static class Response extends ActionResponse implements ChunkedToXContentObject { + public static final ParseField DATA_STREAMS_FIELD = new ParseField("data_streams"); + + public record DataStreamEntry(String dataStreamName, DataStreamOptions dataStreamOptions) implements Writeable, ToXContentObject { + + public static final ParseField NAME_FIELD = new ParseField("name"); + public static final ParseField OPTIONS_FIELD = new ParseField("options"); + + DataStreamEntry(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalWriteable(DataStreamOptions::read)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(dataStreamName); + out.writeOptionalWriteable(dataStreamOptions); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME_FIELD.getPreferredName(), dataStreamName); + if (dataStreamOptions != null && dataStreamOptions.isEmpty() == false) { + builder.field(OPTIONS_FIELD.getPreferredName(), dataStreamOptions); + } + 
builder.endObject(); + return builder; + } + } + + private final List dataStreams; + + public Response(List dataStreams) { + this.dataStreams = dataStreams; + } + + public Response(StreamInput in) throws IOException { + this(in.readCollectionAsList(DataStreamEntry::new)); + } + + public List getDataStreams() { + return dataStreams; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(dataStreams); + } + + @Override + public Iterator toXContentChunked(ToXContent.Params outerParams) { + return Iterators.concat(Iterators.single((builder, params) -> { + builder.startObject(); + builder.startArray(DATA_STREAMS_FIELD.getPreferredName()); + return builder; + }), + Iterators.map(dataStreams.iterator(), entry -> (builder, params) -> entry.toXContent(builder, outerParams)), + Iterators.single((builder, params) -> { + builder.endArray(); + builder.endObject(); + return builder; + }) + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return dataStreams.equals(response.dataStreams); + } + + @Override + public int hashCode() { + return Objects.hash(dataStreams); + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..d055a6972312a --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/PutDataStreamOptionsAction.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.DataStreamFailureStore; +import org.elasticsearch.cluster.metadata.DataStreamOptions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Sets the data stream options that were provided in the request to the requested data streams. 
+ */ +public class PutDataStreamOptionsAction { + + public static final ActionType INSTANCE = new ActionType<>("indices:admin/data_stream/options/put"); + + private PutDataStreamOptionsAction() {/* no instances */} + + public static final class Request extends AcknowledgedRequest implements IndicesRequest.Replaceable { + + public interface Factory { + Request create(@Nullable DataStreamFailureStore dataStreamFailureStore); + } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "put_data_stream_options_request", + false, + (args, factory) -> factory.create((DataStreamFailureStore) args[0]) + ); + + static { + PARSER.declareObjectOrNull( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> DataStreamFailureStore.PARSER.parse(p, null), + null, + new ParseField("failure_store") + ); + } + + public static Request parseRequest(XContentParser parser, Factory factory) { + return PARSER.apply(parser, factory); + } + + private String[] names; + private IndicesOptions indicesOptions = IndicesOptions.builder() + .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS) + .wildcardOptions( + IndicesOptions.WildcardOptions.builder().matchOpen(true).matchClosed(true).allowEmptyExpressions(true).resolveAliases(false) + ) + .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowAliasToMultipleIndices(false).allowClosedIndices(true)) + .build(); + private final DataStreamOptions options; + + public Request(StreamInput in) throws IOException { + super(in); + this.names = in.readStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + options = DataStreamOptions.read(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(names); + indicesOptions.writeIndicesOptions(out); + out.writeWriteable(options); + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names, 
DataStreamOptions options) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + this.options = options; + } + + public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String[] names, @Nullable DataStreamFailureStore failureStore) { + super(masterNodeTimeout, ackTimeout); + this.names = names; + this.options = new DataStreamOptions(failureStore); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (options.failureStore() == null) { + validationException = addValidationError("At least one option needs to be provided", validationException); + } + return validationException; + } + + public String[] getNames() { + return names; + } + + public DataStreamOptions getOptions() { + return options; + } + + @Override + public String[] indices() { + return names; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public Request indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + return this; + } + + @Override + public boolean includeDataStreams() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Arrays.equals(names, request.names) + && Objects.equals(indicesOptions, request.indicesOptions) + && options.equals(request.options); + } + + @Override + public int hashCode() { + int result = Objects.hash(indicesOptions, options); + result = 31 * result + Arrays.hashCode(names); + return result; + } + + @Override + public IndicesRequest indices(String... 
names) { + this.names = names; + return this; + } + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..ead23ed78222b --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportDeleteDataStreamOptionsAction.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.List; + +/** + * Transport action that resolves the data stream names from the request and removes any configured data stream options from them. 
+ */ +public class TransportDeleteDataStreamOptionsAction extends AcknowledgedTransportMasterNodeAction { + + private final MetadataDataStreamsService metadataDataStreamsService; + private final SystemIndices systemIndices; + + @Inject + public TransportDeleteDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataDataStreamsService metadataDataStreamsService, + SystemIndices systemIndices + ) { + super( + DeleteDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + DeleteDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.metadataDataStreamsService = metadataDataStreamsService; + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + DeleteDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List dataStreamNames = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + for (String name : dataStreamNames) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + metadataDataStreamsService.removeDataStreamOptions(dataStreamNames, request.ackTimeout(), request.masterNodeTimeout(), listener); + } + + @Override + protected ClusterBlockException checkBlock(DeleteDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java new file mode 100644 
index 0000000000000..b032b35c943c0 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportGetDataStreamOptionsAction.java @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Collects the data streams from the cluster state and then returns for each data stream its name and its + * data stream options. Currently, data stream options include only the failure store configuration. 
+ */ +public class TransportGetDataStreamOptionsAction extends TransportMasterNodeReadAction< + GetDataStreamOptionsAction.Request, + GetDataStreamOptionsAction.Response> { + + private final SystemIndices systemIndices; + + @Inject + public TransportGetDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SystemIndices systemIndices + ) { + super( + GetDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + GetDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + GetDataStreamOptionsAction.Response::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + GetDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List requestedDataStreams = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + Map dataStreams = state.metadata().dataStreams(); + for (String name : requestedDataStreams) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + listener.onResponse( + new GetDataStreamOptionsAction.Response( + requestedDataStreams.stream() + .map(dataStreams::get) + .filter(Objects::nonNull) + .map( + dataStream -> new GetDataStreamOptionsAction.Response.DataStreamEntry( + dataStream.getName(), + dataStream.getDataStreamOptions() + ) + ) + .sorted(Comparator.comparing(GetDataStreamOptionsAction.Response.DataStreamEntry::dataStreamName)) + .toList() + ) + ); + } + + @Override + protected ClusterBlockException checkBlock(GetDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..b1386232c44f9 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/action/TransportPutDataStreamOptionsAction.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.DataStreamsActionUtil; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + 
+import java.util.List; + +/** + * Transport action that resolves the data stream names from the request and sets the data stream options provided in the request. + */ +public class TransportPutDataStreamOptionsAction extends AcknowledgedTransportMasterNodeAction { + + private final MetadataDataStreamsService metadataDataStreamsService; + private final SystemIndices systemIndices; + + @Inject + public TransportPutDataStreamOptionsAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataDataStreamsService metadataDataStreamsService, + SystemIndices systemIndices + ) { + super( + PutDataStreamOptionsAction.INSTANCE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + PutDataStreamOptionsAction.Request::new, + indexNameExpressionResolver, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.metadataDataStreamsService = metadataDataStreamsService; + this.systemIndices = systemIndices; + } + + @Override + protected void masterOperation( + Task task, + PutDataStreamOptionsAction.Request request, + ClusterState state, + ActionListener listener + ) { + List dataStreamNames = DataStreamsActionUtil.getDataStreamNames( + indexNameExpressionResolver, + state, + request.getNames(), + request.indicesOptions() + ); + for (String name : dataStreamNames) { + systemIndices.validateDataStreamAccess(name, threadPool.getThreadContext()); + } + metadataDataStreamsService.setDataStreamOptions( + dataStreamNames, + request.getOptions(), + request.ackTimeout(), + request.masterNodeTimeout(), + listener + ); + } + + @Override + protected ClusterBlockException checkBlock(PutDataStreamOptionsAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java new file mode 100644 index 0000000000000..96460632ff443 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestDeleteDataStreamOptionsAction.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.DeleteDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; + +@ServerlessScope(Scope.INTERNAL) +public class RestDeleteDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "delete_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(DELETE, 
"/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + final var deleteDataOptionsRequest = new DeleteDataStreamOptionsAction.Request( + getMasterNodeTimeout(request), + request.paramAsTime("timeout", AcknowledgedRequest.DEFAULT_ACK_TIMEOUT), + Strings.splitStringByCommaToArray(request.param("name")) + ); + deleteDataOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, deleteDataOptionsRequest.indicesOptions())); + return channel -> client.execute( + DeleteDataStreamOptionsAction.INSTANCE, + deleteDataOptionsRequest, + new RestToXContentListener<>(channel) + ); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java new file mode 100644 index 0000000000000..6d6530efce1b9 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestGetDataStreamOptionsAction.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.GetDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +@ServerlessScope(Scope.PUBLIC) +public class RestGetDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "get_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) { + GetDataStreamOptionsAction.Request getDataStreamOptionsRequest = new GetDataStreamOptionsAction.Request( + RestUtils.getMasterNodeTimeout(request), + Strings.splitStringByCommaToArray(request.param("name")) + ); + getDataStreamOptionsRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); + getDataStreamOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, getDataStreamOptionsRequest.indicesOptions())); + return channel -> client.execute( + GetDataStreamOptionsAction.INSTANCE, + getDataStreamOptionsRequest, + new RestRefCountedChunkedToXContentListener<>(channel) + ); + } + + @Override + public boolean allowSystemIndexAccessByDefault() { + return true; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java new file mode 100644 index 0000000000000..9191b96b6039e --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.datastreams.options.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.datastreams.options.action.PutDataStreamOptionsAction; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.rest.RestUtils.getAckTimeout; +import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; + +@ServerlessScope(Scope.PUBLIC) +public class RestPutDataStreamOptionsAction extends BaseRestHandler { + + @Override + public String getName() { + return "put_data_stream_options_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/_data_stream/{name}/_options")); + } + + @Override + protected RestChannelConsumer 
prepareRequest(RestRequest request, NodeClient client) throws IOException { + try (XContentParser parser = request.contentParser()) { + PutDataStreamOptionsAction.Request putOptionsRequest = PutDataStreamOptionsAction.Request.parseRequest( + parser, + (failureStore) -> new PutDataStreamOptionsAction.Request( + getMasterNodeTimeout(request), + getAckTimeout(request), + Strings.splitStringByCommaToArray(request.param("name")), + failureStore + ) + ); + putOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, putOptionsRequest.indicesOptions())); + return channel -> client.execute(PutDataStreamOptionsAction.INSTANCE, putOptionsRequest, new RestToXContentListener<>(channel)); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index 8a46550f8a689..db3973c1a15a8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -45,6 +45,7 @@ public class MetadataDataStreamsService { private final DataStreamGlobalRetentionSettings globalRetentionSettings; private final MasterServiceTaskQueue updateLifecycleTaskQueue; private final MasterServiceTaskQueue setRolloverOnWriteTaskQueue; + private final MasterServiceTaskQueue updateOptionsTaskQueue; public MetadataDataStreamsService( ClusterService clusterService, @@ -93,6 +94,20 @@ public Tuple executeTask( Priority.NORMAL, rolloverOnWriteExecutor ); + ClusterStateTaskExecutor updateOptionsExecutor = new SimpleBatchedAckListenerTaskExecutor<>() { + + @Override + public Tuple executeTask( + UpdateOptionsTask modifyOptionsTask, + ClusterState clusterState + ) { + return new Tuple<>( + updateDataStreamOptions(clusterState, modifyOptionsTask.getDataStreamNames(), modifyOptionsTask.getOptions()), + modifyOptionsTask + ); + } + }; + 
this.updateOptionsTaskQueue = clusterService.createTaskQueue("modify-data-stream-options", Priority.NORMAL, updateOptionsExecutor); } public void modifyDataStream(final ModifyDataStreamsAction.Request request, final ActionListener listener) { @@ -147,6 +162,39 @@ public void removeLifecycle( ); } + /** + * Submits the task to set the provided data stream options to the requested data streams. + */ + public void setDataStreamOptions( + final List dataStreamNames, + DataStreamOptions options, + TimeValue ackTimeout, + TimeValue masterTimeout, + final ActionListener listener + ) { + updateOptionsTaskQueue.submitTask( + "set-data-stream-options", + new UpdateOptionsTask(dataStreamNames, options, ackTimeout, listener), + masterTimeout + ); + } + + /** + * Submits the task to remove the data stream options from the requested data streams. + */ + public void removeDataStreamOptions( + List dataStreamNames, + TimeValue ackTimeout, + TimeValue masterTimeout, + ActionListener listener + ) { + updateOptionsTaskQueue.submitTask( + "delete-data-stream-options", + new UpdateOptionsTask(dataStreamNames, null, ackTimeout, listener), + masterTimeout + ); + } + @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { clusterService.submitUnbatchedStateUpdateTask(source, task); @@ -228,6 +276,24 @@ ClusterState updateDataLifecycle(ClusterState currentState, List dataStr return ClusterState.builder(currentState).metadata(builder.build()).build(); } + /** + * Creates an updated cluster state in which the requested data streams have the data stream options provided. + * Visible for testing. 
+ */ + ClusterState updateDataStreamOptions( + ClusterState currentState, + List dataStreamNames, + @Nullable DataStreamOptions dataStreamOptions + ) { + Metadata metadata = currentState.metadata(); + Metadata.Builder builder = Metadata.builder(metadata); + for (var dataStreamName : dataStreamNames) { + var dataStream = validateDataStream(metadata, dataStreamName); + builder.put(dataStream.copy().setDataStreamOptions(dataStreamOptions).build()); + } + return ClusterState.builder(currentState).metadata(builder.build()).build(); + } + /** * Creates an updated cluster state in which the requested data stream has the flag {@link DataStream#rolloverOnWrite()} * set to the value of the parameter rolloverOnWrite @@ -372,6 +438,34 @@ public DataStreamLifecycle getDataLifecycle() { } } + /** + * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. + */ + static class UpdateOptionsTask extends AckedBatchedClusterStateUpdateTask { + + private final List dataStreamNames; + private final DataStreamOptions options; + + UpdateOptionsTask( + List dataStreamNames, + @Nullable DataStreamOptions options, + TimeValue ackTimeout, + ActionListener listener + ) { + super(ackTimeout, listener); + this.dataStreamNames = dataStreamNames; + this.options = options; + } + + public List getDataStreamNames() { + return dataStreamNames; + } + + public DataStreamOptions getOptions() { + return options; + } + } + /** * A cluster state update task that consists of the cluster state request and the listeners that need to be notified upon completion. 
*/ diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java index 020955d226a0f..9b0eb93b496a4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java @@ -24,7 +24,16 @@ protected Writeable.Reader instanceReader() { @Override protected DataStreamOptions createTestInstance() { - return new DataStreamOptions(randomBoolean() ? null : DataStreamFailureStoreTests.randomFailureStore()); + return randomDataStreamOptions(); + } + + public static DataStreamOptions randomDataStreamOptions() { + return switch (randomIntBetween(0, 2)) { + case 0 -> DataStreamOptions.EMPTY; + case 1 -> DataStreamOptions.FAILURE_STORE_DISABLED; + case 2 -> DataStreamOptions.FAILURE_STORE_ENABLED; + default -> throw new IllegalArgumentException("Illegal randomisation branch"); + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 92c1103c950c0..276c20d2d1322 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -422,6 +422,39 @@ public void testUpdateLifecycle() { } } + public void testUpdateDataStreamOptions() { + String dataStream = randomAlphaOfLength(5); + // we want the data stream options to be non-empty, so we can see the removal in action + DataStreamOptions dataStreamOptions = randomValueOtherThan( + DataStreamOptions.EMPTY, + DataStreamOptionsTests::randomDataStreamOptions + ); + ClusterState before = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>(dataStream, 2)), List.of()); + 
MetadataDataStreamsService service = new MetadataDataStreamsService( + mock(ClusterService.class), + mock(IndicesService.class), + DataStreamGlobalRetentionSettings.create(ClusterSettings.createBuiltInClusterSettings()) + ); + + // Ensure no data stream options are stored + DataStream updatedDataStream = before.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(DataStreamOptions.EMPTY)); + + // Set non-empty data stream options + ClusterState after = service.updateDataStreamOptions(before, List.of(dataStream), dataStreamOptions); + updatedDataStream = after.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(dataStreamOptions)); + before = after; + + // Remove data stream options + after = service.updateDataStreamOptions(before, List.of(dataStream), null); + updatedDataStream = after.metadata().dataStreams().get(dataStream); + assertNotNull(updatedDataStream); + assertThat(updatedDataStream.getDataStreamOptions(), equalTo(DataStreamOptions.EMPTY)); + } + private MapperService getMapperService(IndexMetadata im) { try { String mapping = im.mapping().source().toString(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index d791873eb3142..b29dc0fa410b6 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -502,6 +502,9 @@ public class Constants { "indices:admin/data_stream/lifecycle/get", "indices:admin/data_stream/lifecycle/put", 
"indices:admin/data_stream/lifecycle/explain", + "indices:admin/data_stream/options/delete", + "indices:admin/data_stream/options/get", + "indices:admin/data_stream/options/put", "indices:admin/delete", "indices:admin/flush", "indices:admin/flush[s]", From 7bd6f2ce6a708364a41b1d1620a08df3f8816258 Mon Sep 17 00:00:00 2001 From: kosabogi <105062005+kosabogi@users.noreply.github.com> Date: Mon, 14 Oct 2024 15:57:00 +0200 Subject: [PATCH 14/19] Expands semantic_text tutorial with hybrid search (#114398) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Creates a new page for the hybrid search tutorial * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Adds search response example * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search 
Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó * Update docs/reference/search/search-your-data/semantic-text-hybrid-search Co-authored-by: István Zoltán Szabó --------- Co-authored-by: István Zoltán Szabó --- .../search-your-data/semantic-search.asciidoc | 1 + .../semantic-text-hybrid-search | 254 ++++++++++++++++++ 2 files changed, 255 insertions(+) create mode 100644 docs/reference/search/search-your-data/semantic-text-hybrid-search diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index 62e41b3eef3de..0ef8591e42b5d 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -104,6 +104,7 @@ IMPORTANT: For the easiest way to perform semantic search in the {stack}, refer include::semantic-search-semantic-text.asciidoc[] +include::semantic-text-hybrid-search[] include::semantic-search-inference.asciidoc[] include::semantic-search-elser.asciidoc[] include::cohere-es.asciidoc[] diff --git a/docs/reference/search/search-your-data/semantic-text-hybrid-search b/docs/reference/search/search-your-data/semantic-text-hybrid-search new file mode 100644 index 0000000000000..c56b283434df5 --- /dev/null +++ b/docs/reference/search/search-your-data/semantic-text-hybrid-search @@ -0,0 +1,254 @@ +[[semantic-text-hybrid-search]] +=== Tutorial: hybrid search with 
`semantic_text` +++++ +Hybrid search with `semantic_text` +++++ + +This tutorial demonstrates how to perform hybrid search, combining semantic search with traditional full-text search. + +In hybrid search, semantic search retrieves results based on the meaning of the text, while full-text search focuses on exact word matches. By combining both methods, hybrid search delivers more relevant results, particularly in cases where relying on a single approach may not be sufficient. + +The recommended way to use hybrid search in the {stack} is following the `semantic_text` workflow. This tutorial uses the <> for demonstration, but you can use any service and its supported models offered by the {infer-cap} API. + +[discrete] +[[semantic-text-hybrid-infer-endpoint]] +==== Create the {infer} endpoint + +Create an inference endpoint by using the <>: + +[source,console] +------------------------------------------------------------ +PUT _inference/sparse_embedding/my-elser-endpoint <1> +{ + "service": "elser", <2> + "service_settings": { + "adaptive_allocations": { <3> + "enabled": true, + "min_number_of_allocations": 3, + "max_number_of_allocations": 10 + }, + "num_threads": 1 + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The task type is `sparse_embedding` in the path as the `elser` service will +be used and ELSER creates sparse vectors. The `inference_id` is +`my-elser-endpoint`. +<2> The `elser` service is used in this example. +<3> This setting enables and configures adaptive allocations. +Adaptive allocations make it possible for ELSER to automatically scale up or down resources based on the current load on the process. + +[NOTE] +==== +You might see a 502 bad gateway error in the response when using the {kib} Console. +This error usually just reflects a timeout, while the model downloads in the background. +You can check the download progress in the {ml-app} UI. 
+==== + +[discrete] +[[hybrid-search-create-index-mapping]] +==== Create an index mapping for hybrid search + +The destination index will contain both the embeddings for semantic search and the original text field for full-text search. This structure enables the combination of semantic search and full-text search. + +[source,console] +------------------------------------------------------------ +PUT semantic-embeddings +{ + "mappings": { + "properties": { + "semantic_text": { <1> + "type": "semantic_text", + "inference_id": "my-elser-endpoint" <2> + }, + "content": { <3> + "type": "text", + "copy_to": "semantic_text" <4> + } + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The name of the field to contain the generated embeddings for semantic search. +<2> The identifier of the inference endpoint that generates the embeddings based on the input text. +<3> The name of the field to contain the original text for lexical search. +<4> The textual data stored in the `content` field will be copied to `semantic_text` and processed by the {infer} endpoint. + +[NOTE] +==== +If you want to run a search on indices that were populated by web crawlers or connectors, you have to +<> for these indices to +include the `semantic_text` field. Once the mapping is updated, you'll need to run a full web crawl or a full connector sync. This ensures that all existing +documents are reprocessed and updated with the new semantic embeddings, enabling hybrid search on the updated data. +==== + +[discrete] +[[semantic-text-hybrid-load-data]] +==== Load data + +In this step, you load the data that you later use to create embeddings from. + +Use the `msmarco-passagetest2019-top1000` data set, which is a subset of the MS MARCO Passage Ranking data set. It consists of 200 queries, each accompanied by a list of relevant text passages. 
All unique passages, along with their IDs, have been extracted from that data set and compiled into a https://github.com/elastic/stack-docs/blob/main/docs/en/stack/ml/nlp/data/msmarco-passagetest2019-unique.tsv[tsv file]. + +Download the file and upload it to your cluster using the {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[Data Visualizer] in the {ml-app} UI. After your data is analyzed, click **Override settings**. Under **Edit field names**, assign `id` to the first column and `content` to the second. Click **Apply**, then **Import**. Name the index `test-data`, and click **Import**. After the upload is complete, you will see an index named `test-data` with 182,469 documents. + +[discrete] +[[hybrid-search-reindex-data]] +==== Reindex the data for hybrid search + +Reindex the data from the `test-data` index into the `semantic-embeddings` index. +The data in the `content` field of the source index is copied into the `content` field of the destination index. +The `copy_to` parameter set in the index mapping creation ensures that the content is copied into the `semantic_text` field. The data is processed by the {infer} endpoint at ingest time to generate embeddings. + +[NOTE] +==== +This step uses the reindex API to simulate data ingestion. If you are working with data that has already been indexed, +rather than using the `test-data` set, reindexing is still required to ensure that the data is processed by the {infer} endpoint +and the necessary embeddings are generated. +==== + +[source,console] +------------------------------------------------------------ +POST _reindex?wait_for_completion=false +{ + "source": { + "index": "test-data", + "size": 10 <1> + }, + "dest": { + "index": "semantic-embeddings" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The default batch size for reindexing is 1000. 
Reducing size to a smaller +number makes the update of the reindexing process quicker which enables you to +follow the progress closely and detect errors early. + +The call returns a task ID to monitor the progress: + +[source,console] +------------------------------------------------------------ +GET _tasks/ +------------------------------------------------------------ +// TEST[skip:TBD] + +Reindexing large datasets can take a long time. You can test this workflow using only a subset of the dataset. + +To cancel the reindexing process and generate embeddings for the subset that was reindexed: + +[source,console] +------------------------------------------------------------ +POST _tasks//_cancel +------------------------------------------------------------ +// TEST[skip:TBD] + +[discrete] +[[hybrid-search-perform-search]] +==== Perform hybrid search + +After reindexing the data into the `semantic-embeddings` index, you can perform hybrid search by using <>. RRF is a technique that merges the rankings from both semantic and lexical queries, giving more weight to results that rank high in either search. This ensures that the final results are balanced and relevant. + +[source,console] +------------------------------------------------------------ +GET semantic-embeddings/_search +{ + "retriever": { + "rrf": { + "retrievers": [ + { + "standard": { <1> + "query": { + "match": { + "content": "How to avoid muscle soreness while running?" <2> + } + } + } + }, + { + "standard": { <3> + "query": { + "semantic": { + "field": "semantic_text", <4> + "query": "How to avoid muscle soreness while running?" + } + } + } + } + ] + } + } +} +------------------------------------------------------------ +// TEST[skip:TBD] +<1> The first `standard` retriever represents the traditional lexical search. +<2> Lexical search is performed on the `content` field using the specified phrase. +<3> The second `standard` retriever refers to the semantic search. 
+<4> The `semantic_text` field is used to perform the semantic search. + + +After performing the hybrid search, the query will return the top 10 documents that match both semantic and lexical search criteria. The results include detailed information about each document: + +[source,console-result] +------------------------------------------------------------ +{ + "took": 107, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 473, + "relation": "eq" + }, + "max_score": null, + "hits": [ + { + "_index": "semantic-embeddings", + "_id": "wv65epIBEMBRnhfTsOFM", + "_score": 0.032786883, + "_rank": 1, + "_source": { + "semantic_text": { + "inference": { + "inference_id": "my-elser-endpoint", + "model_settings": { + "task_type": "sparse_embedding" + }, + "chunks": [ + { + "text": "What so many out there do not realize is the importance of what you do after you work out. You may have done the majority of the work, but how you treat your body in the minutes and hours after you exercise has a direct effect on muscle soreness, muscle strength and growth, and staying hydrated. Cool Down. After your last exercise, your workout is not over. The first thing you need to do is cool down. Even if running was all that you did, you still should do light cardio for a few minutes. This brings your heart rate down at a slow and steady pace, which helps you avoid feeling sick after a workout.", + "embeddings": { + "exercise": 1.571044, + "after": 1.3603843, + "sick": 1.3281639, + "cool": 1.3227621, + "muscle": 1.2645415, + "sore": 1.2561599, + "cooling": 1.2335974, + "running": 1.1750668, + "hours": 1.1104802, + "out": 1.0991782, + "##io": 1.0794281, + "last": 1.0474665, + (...) 
+ } + } + ] + } + }, + "id": 8408852, + "content": "What so many out there do not realize is the importance of (...)" + } + } + ] + } +} +------------------------------------------------------------ +// NOTCONSOLE From 4c15cc077887d00ecf0e02c39b42cf01874ab6c4 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Mon, 14 Oct 2024 17:08:23 +0300 Subject: [PATCH 15/19] Add ResolvedExpression wrapper (#114592) **Introduction** > In order to make adoption of failure stores simpler for all users, we are introducing a new syntactical feature to index expression resolution: The selector. > > Selectors, denoted with a :: followed by a recognized suffix will allow users to specify which component of an index abstraction they would like to operate on within an API call. In this case, an index abstraction is a concrete index, data stream, or alias; Any abstraction that can be resolved to a set of indices/shards. We define a component of an index abstraction to be some searchable unit of the index abstraction. > > To start, we will support two components: data and failures. Concrete indices are their own data components, while the data component for index aliases are all of the indices contained therein. For data streams, the data component corresponds to their backing indices. Data stream aliases mirror this, treating all backing indices of the data streams they correspond to as their data component. > > The failure component is only supported by data streams and data stream aliases. The failure component of these abstractions refer to the data streams' failure stores. Indices and index aliases do not have a failure component. For more details and examples see https://github.com/elastic/elasticsearch/pull/113144. All this work has been cherry picked from there. **Purpose of this PR** This PR is introducing a wrapper around the resolved expression that used to be a `String` to create the base on which the selectors are going to be added. 
The current PR is just a refactoring and does not and should not change any existing behaviour. --- .../TransportClusterSearchShardsAction.java | 3 +- .../indices/resolve/ResolveIndexAction.java | 9 +- .../query/TransportValidateQueryAction.java | 3 +- .../explain/TransportExplainAction.java | 3 +- .../action/search/TransportSearchAction.java | 24 +- .../search/TransportSearchShardsAction.java | 6 +- .../metadata/IndexNameExpressionResolver.java | 196 +++++++----- .../elasticsearch/indices/IndicesService.java | 3 +- .../elasticsearch/search/SearchService.java | 3 +- .../indices/resolve/ResolveIndexTests.java | 15 +- .../DateMathExpressionResolverTests.java | 89 +++--- .../cluster/metadata/ExpressionListTests.java | 108 ++++--- .../IndexNameExpressionResolverTests.java | 65 ++-- .../WildcardExpressionResolverTests.java | 299 ++++++++++-------- .../indices/IndicesServiceTests.java | 34 +- 15 files changed, 504 insertions(+), 356 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java index 9ffef1f178f44..b855f2cee7613 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/TransportClusterSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; @@ -84,7 +85,7 @@ protected void masterOperation( String[] concreteIndices = 
indexNameExpressionResolver.concreteIndexNames(clusterState, request); Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map indicesAndFilters = new HashMap<>(); - Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); for (String index : concreteIndices) { final AliasFilter aliasFilter = indicesService.buildAliasFilter(clusterState, index, indicesAndAliases); final String[] aliases = indexNameExpressionResolver.indexAliases( diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 5c5c71bc002b3..f5c100b7884bb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -565,8 +566,8 @@ static void resolveIndices( if (names.length == 1 && (Metadata.ALL.equals(names[0]) || Regex.isMatchAllPattern(names[0]))) { names = new String[] { "**" }; } - Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); - for (String s : resolvedIndexAbstractions) { + Set resolvedIndexAbstractions = resolver.resolveExpressions(clusterState, indicesOptions, true, names); + for (ResolvedExpression s 
: resolvedIndexAbstractions) { enrichIndexAbstraction(clusterState, s, indices, aliases, dataStreams); } indices.sort(Comparator.comparing(ResolvedIndexAbstraction::getName)); @@ -597,12 +598,12 @@ private static void mergeResults( private static void enrichIndexAbstraction( ClusterState clusterState, - String indexAbstraction, + ResolvedExpression indexAbstraction, List indices, List aliases, List dataStreams ) { - IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction); + IndexAbstraction ia = clusterState.metadata().getIndicesLookup().get(indexAbstraction.resource()); if (ia != null) { switch (ia.getType()) { case CONCRETE_INDEX -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 4e9830fe0d14e..e01f364712676 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; @@ -133,7 +134,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener @Override protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) { final ClusterState clusterState = clusterService.state(); - final Set indicesAndAliases = 
indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, request.indices()); final AliasFilter aliasFilter = searchService.buildAliasFilter(clusterState, shard.getIndexName(), indicesAndAliases); return new ShardValidateQueryRequest(shard.shardId(), aliasFilter, request); } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 9c82d032014f2..84c6df7b8a66f 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; @@ -109,7 +110,7 @@ protected boolean resolveIndex(ExplainRequest request) { @Override protected void resolveRequest(ClusterState state, InternalRequest request) { - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(state, request.request().index()); final AliasFilter aliasFilter = searchService.buildAliasFilter(state, request.concreteIndex(), indicesAndAliases); request.request().filteringAlias(aliasFilter); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 
1645a378446a4..b5864f64a7824 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; @@ -110,6 +111,7 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.stream.Collectors; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -203,7 +205,7 @@ public TransportSearchAction( private Map buildPerIndexOriginalIndices( ClusterState clusterState, - Set indicesAndAliases, + Set indicesAndAliases, String[] indices, IndicesOptions indicesOptions ) { @@ -211,6 +213,9 @@ private Map buildPerIndexOriginalIndices( var blocks = clusterState.blocks(); // optimization: mostly we do not have any blocks so there's no point in the expensive per-index checking boolean hasBlocks = blocks.global().isEmpty() == false || blocks.indices().isEmpty() == false; + // Get a distinct set of index abstraction names present from the resolved expressions to help with the reverse resolution from + // concrete index to the expression that produced it. 
+ Set indicesAndAliasesResources = indicesAndAliases.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); for (String index : indices) { if (hasBlocks) { blocks.indexBlockedRaiseException(ClusterBlockLevel.READ, index); @@ -227,8 +232,8 @@ private Map buildPerIndexOriginalIndices( String[] finalIndices = Strings.EMPTY_ARRAY; if (aliases == null || aliases.length == 0 - || indicesAndAliases.contains(index) - || hasDataStreamRef(clusterState, indicesAndAliases, index)) { + || indicesAndAliasesResources.contains(index) + || hasDataStreamRef(clusterState, indicesAndAliasesResources, index)) { finalIndices = new String[] { index }; } if (aliases != null) { @@ -247,7 +252,11 @@ private static boolean hasDataStreamRef(ClusterState clusterState, Set i return indicesAndAliases.contains(ret.getParentDataStream().getName()); } - Map buildIndexAliasFilters(ClusterState clusterState, Set indicesAndAliases, Index[] concreteIndices) { + Map buildIndexAliasFilters( + ClusterState clusterState, + Set indicesAndAliases, + Index[] concreteIndices + ) { final Map aliasFilterMap = new HashMap<>(); for (Index index : concreteIndices) { clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index.getName()); @@ -1237,7 +1246,10 @@ private void executeSearch( } else { final Index[] indices = resolvedIndices.getConcreteLocalIndices(); concreteLocalIndices = Arrays.stream(indices).map(Index::getName).toArray(String[]::new); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); aliasFilter = buildIndexAliasFilters(clusterState, indicesAndAliases, indices); aliasFilter.putAll(remoteAliasMap); localShardIterators = getLocalShardsIterator( @@ -1810,7 +1822,7 @@ List getLocalShardsIterator( ClusterState clusterState, SearchRequest searchRequest, String clusterAlias, - 
Set indicesAndAliases, + Set indicesAndAliases, String[] concreteIndices ) { var routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java index f418b5617b2a1..b94bd95c93d8a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchShardsAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.index.Index; @@ -127,7 +128,10 @@ public void searchShards(Task task, SearchShardsRequest searchShardsRequest, Act searchService.getRewriteContext(timeProvider::absoluteStartMillis, resolvedIndices, null), listener.delegateFailureAndWrap((delegate, searchRequest) -> { Index[] concreteIndices = resolvedIndices.getConcreteLocalIndices(); - final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions(clusterState, searchRequest.indices()); + final Set indicesAndAliases = indexNameExpressionResolver.resolveExpressions( + clusterState, + searchRequest.indices() + ); final Map aliasFilters = transportSearchAction.buildIndexAliasFilters( clusterState, indicesAndAliases, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 2229166a2d779..eaf54034b22e0 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -74,6 +74,15 @@ public IndexNameExpressionResolver(ThreadContext threadContext, SystemIndices sy this.systemIndices = Objects.requireNonNull(systemIndices, "System Indices must not be null"); } + /** + * This contains the resolved expression in the form of the resource. + * Soon it will facilitate the index component selector. + * @param resource the resolved resolvedExpression + */ + public record ResolvedExpression(String resource) { + + } + /** * Same as {@link #concreteIndexNames(ClusterState, IndicesOptions, String...)}, but the index expressions and options * are encapsulated in the specified request. @@ -191,8 +200,9 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); return expressions.stream() + .map(ResolvedExpression::resource) .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) .filter(ia -> ia.getType() == Type.DATA_STREAM) @@ -221,10 +231,11 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressions(context, request.index()); if (expressions.size() == 1) { - IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); + ResolvedExpression resolvedExpression = expressions.iterator().next(); + IndexAbstraction ia = state.metadata().getIndicesLookup().get(resolvedExpression.resource()); if (ia.getType() == Type.ALIAS) { Index writeIndex = ia.getWriteIndex(); if (writeIndex == 
null) { @@ -246,14 +257,14 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { + protected static Collection resolveExpressions(Context context, String... expressions) { if (context.getOptions().expandWildcardExpressions() == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); } else { return ExplicitResourceNameFilter.filterUnavailable( context, - DateMathExpressionResolver.resolve(context, List.of(expressions)) + DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) ); } } else { @@ -264,7 +275,10 @@ protected static Collection resolveExpressions(Context context, String.. } else { return WildcardExpressionResolver.resolve( context, - ExplicitResourceNameFilter.filterUnavailable(context, DateMathExpressionResolver.resolve(context, List.of(expressions))) + ExplicitResourceNameFilter.filterUnavailable( + context, + DateMathExpressionResolver.resolve(context, Arrays.stream(expressions).map(ResolvedExpression::new).toList()) + ) ); } } @@ -339,12 +353,12 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... 
indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressions(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); - for (String expression : expressions) { - final IndexAbstraction indexAbstraction = indicesLookup.get(expression); + for (ResolvedExpression resolvedExpression : expressions) { + final IndexAbstraction indexAbstraction = indicesLookup.get(resolvedExpression.resource()); assert indexAbstraction != null; if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { Index writeIndex = indexAbstraction.getWriteIndex(); @@ -378,7 +392,7 @@ Index[] concreteIndices(Context context, String... indexExpressions) { throw new IllegalArgumentException( indexAbstraction.getType().getDisplayName() + " [" - + expression + + resolvedExpression.resource() + "] has more than one index associated with it " + Arrays.toString(indexNames) + ", can't execute a single index op" @@ -642,7 +656,7 @@ public Index concreteSingleIndex(ClusterState state, IndicesRequest request) { * Utility method that allows to resolve an index expression to its corresponding single write index. * * @param state the cluster state containing all the data to resolve to expression to a concrete index - * @param request The request that defines how the an alias or an index need to be resolved to a concrete index + * @param request The request that defines how an alias or an index need to be resolved to a concrete index * and the expression that can be resolved to an alias or an index name. 
* @throws IllegalArgumentException if the index resolution does not lead to an index, or leads to more than one index * @return the write index obtained as a result of the index resolution @@ -734,7 +748,7 @@ public static String resolveDateMathExpression(String dateExpression, long time) /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ - public Set resolveExpressions(ClusterState state, String... expressions) { + public Set resolveExpressions(ClusterState state, String... expressions) { return resolveExpressions(state, IndicesOptions.lenientExpandOpen(), false, expressions); } @@ -743,7 +757,7 @@ public Set resolveExpressions(ClusterState state, String... expressions) * If {@param preserveDataStreams} is {@code true}, datastreams that are covered by the wildcards from the * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. */ - public Set resolveExpressions( + public Set resolveExpressions( ClusterState state, IndicesOptions indicesOptions, boolean preserveDataStreams, @@ -760,10 +774,10 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); - if (resolved instanceof Set) { + Collection resolved = resolveExpressions(context, expressions); + if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items - return Collections.unmodifiableSet((Set) resolved); + return Collections.unmodifiableSet((Set) resolved); } else { return Set.copyOf(resolved); } @@ -776,7 +790,7 @@ public Set resolveExpressions( * the index itself - null is returned. Returns {@code null} if no filtering is required. * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. 
*/ - public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { + public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); } @@ -802,39 +816,39 @@ public String[] indexAliases( Predicate requiredAlias, Predicate requiredDataStreamAlias, boolean skipIdentity, - Set resolvedExpressions + Set resolvedExpressions ) { - if (isAllIndices(resolvedExpressions)) { + if (isAllIndicesExpression(resolvedExpressions)) { return null; } - + Set resources = resolvedExpressions.stream().map(ResolvedExpression::resource).collect(Collectors.toSet()); final IndexMetadata indexMetadata = state.metadata().getIndices().get(index); if (indexMetadata == null) { // Shouldn't happen throw new IndexNotFoundException(index); } - if (skipIdentity == false && resolvedExpressions.contains(index)) { + if (skipIdentity == false && resources.contains(index)) { return null; } IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (skipIdentity == false && resolvedExpressions.contains(dataStream.getName())) { + if (skipIdentity == false && resources.contains(dataStream.getName())) { // skip the filters when the request targets the data stream name return null; } Map dataStreamAliases = state.metadata().dataStreamAliases(); List aliasesForDataStream; - if (iterateIndexAliases(dataStreamAliases.size(), resolvedExpressions.size())) { + if (iterateIndexAliases(dataStreamAliases.size(), resources.size())) { aliasesForDataStream = dataStreamAliases.values() .stream() - .filter(dataStreamAlias -> resolvedExpressions.contains(dataStreamAlias.getName())) + .filter(dataStreamAlias -> resources.contains(dataStreamAlias.getName())) .filter(dataStreamAlias -> 
dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); } else { - aliasesForDataStream = resolvedExpressions.stream() + aliasesForDataStream = resources.stream() .map(dataStreamAliases::get) .filter(dataStreamAlias -> dataStreamAlias != null && dataStreamAlias.getDataStreams().contains(dataStream.getName())) .toList(); @@ -859,18 +873,15 @@ public String[] indexAliases( } else { final Map indexAliases = indexMetadata.getAliases(); final AliasMetadata[] aliasCandidates; - if (iterateIndexAliases(indexAliases.size(), resolvedExpressions.size())) { + if (iterateIndexAliases(indexAliases.size(), resources.size())) { // faster to iterate indexAliases aliasCandidates = indexAliases.values() .stream() - .filter(aliasMetadata -> resolvedExpressions.contains(aliasMetadata.alias())) + .filter(aliasMetadata -> resources.contains(aliasMetadata.alias())) .toArray(AliasMetadata[]::new); } else { // faster to iterate resolvedExpressions - aliasCandidates = resolvedExpressions.stream() - .map(indexAliases::get) - .filter(Objects::nonNull) - .toArray(AliasMetadata[]::new); + aliasCandidates = resources.stream().map(indexAliases::get).filter(Objects::nonNull).toArray(AliasMetadata[]::new); } List aliases = null; for (AliasMetadata aliasMetadata : aliasCandidates) { @@ -909,12 +920,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); - - // TODO: it appears that this can never be true? 
- if (isAllIndices(resolvedExpressions)) { - return resolveSearchRoutingAllIndices(state.metadata(), routing); - } + final Collection resolvedExpressions = resolveExpressions(context, expressions); Map> routings = null; Set paramRouting = null; @@ -924,8 +930,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab paramRouting = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); } - for (String expression : resolvedExpressions) { - IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(resolvedExpression.resource); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { for (Index index : indexAbstraction.getIndices()) { String concreteIndex = index.getName(); @@ -963,7 +969,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } else { // Index - routings = collectRoutings(routings, paramRouting, norouting, expression); + routings = collectRoutings(routings, paramRouting, norouting, resolvedExpression.resource()); } } @@ -1009,6 +1015,17 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m return null; } + /** + * Identifies whether the array containing index names given as argument refers to all indices + * The empty or null array identifies all indices + * + * @param aliasesOrIndices the array containing index names + * @return true if the provided array maps to all indices, false otherwise + */ + public static boolean isAllIndicesExpression(Collection aliasesOrIndices) { + return isAllIndices(aliasesOrIndices.stream().map(ResolvedExpression::resource).toList()); + } + /** * Identifies whether the array containing index names given as argument refers to all indices * The empty or null array identifies all indices @@ -1249,8 +1266,8 @@ private WildcardExpressionResolver() { * Returns all the indices, datastreams, and 
aliases, considering the open/closed, system, and hidden context parameters. * Depending on the context, returns the names of the datastreams themselves or their backing indices. */ - public static Collection resolveAll(Context context) { - List concreteIndices = resolveEmptyOrTrivialWildcard(context); + public static Collection resolveAll(Context context) { + List concreteIndices = resolveEmptyOrTrivialWildcard(context); if (context.includeDataStreams() == false && context.getOptions().ignoreAliases()) { return concreteIndices; @@ -1265,7 +1282,7 @@ public static Collection resolveAll(Context context) { .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); + Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1293,17 +1310,17 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres * ultimately returned, instead of the alias or datastream name * */ - public static Collection resolve(Context context, List expressions) { + public static Collection resolve(Context context, List expressions) { ExpressionList expressionList = new ExpressionList(context, expressions); // fast exit if there are no wildcards to evaluate if (expressionList.hasWildcard() == false) { return expressions; } - Set result = new HashSet<>(); + Set result = new HashSet<>(); for (ExpressionList.Expression expression : expressionList) { if (expression.isWildcard()) { Stream matchingResources = matchResourcesToWildcard(context, expression.get()); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); + Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); AtomicBoolean emptyWildcardExpansion = new AtomicBoolean(false); if 
(context.getOptions().allowNoIndices() == false) { emptyWildcardExpansion.set(true); @@ -1319,9 +1336,9 @@ public static Collection resolve(Context context, List expressio } } else { if (expression.isExclusion()) { - result.remove(expression.get()); + result.remove(new ResolvedExpression(expression.get())); } else { - result.add(expression.get()); + result.add(expression.resolvedExpression()); } } } @@ -1412,13 +1429,13 @@ private static Map filterIndicesLookupForSuffixWildcar * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. */ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Stream expandToOpenClosed(Context context, Stream resources) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); return resources.flatMap(indexAbstraction -> { if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); + return Stream.of(new ResolvedExpression(indexAbstraction.getName())); } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); + return Stream.of(new ResolvedExpression(indexAbstraction.getName())); } else { Stream indicesStateStream = Stream.of(); if (shouldIncludeRegularIndices(context.getOptions())) { @@ -1434,18 +1451,20 @@ private static Stream expandToOpenClosed(Context context, Stream indexMeta.getState() != excludeState); } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); + return indicesStateStream.map(indexMeta -> new ResolvedExpression(indexMeta.getIndex().getName())); } }); } - private static List resolveEmptyOrTrivialWildcard(Context context) { + private static List resolveEmptyOrTrivialWildcard(Context context) { final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), 
context.getState().metadata()); + Stream result; if (context.systemIndexAccessLevel == SystemIndexAccessLevel.ALL) { - return List.of(allIndices); + result = Arrays.stream(allIndices); } else { - return resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices); + result = resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(context, allIndices).stream(); } + return result.map(ResolvedExpression::new).toList(); } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { @@ -1507,8 +1526,8 @@ private DateMathExpressionResolver() { // utility class } - public static List resolve(Context context, List expressions) { - List result = new ArrayList<>(expressions.size()); + public static List resolve(Context context, List expressions) { + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { result.add(resolveExpression(expression, context::getStartTime)); } @@ -1519,13 +1538,15 @@ static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { + static ResolvedExpression resolveExpression(ExpressionList.Expression expression, LongSupplier getTime) { + String result; if (expression.isExclusion()) { // accepts date-math exclusions that are of the form "-<...{}>", i.e. 
the "-" is outside the "<>" date-math template - return "-" + resolveExpression(expression.get(), getTime); + result = "-" + resolveExpression(expression.get(), getTime); } else { - return resolveExpression(expression.get(), getTime); + result = resolveExpression(expression.get(), getTime); } + return new ResolvedExpression(result); } static String resolveExpression(String expression, LongSupplier getTime) { @@ -1687,25 +1708,26 @@ private ExplicitResourceNameFilter() { * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. */ - public static List filterUnavailable(Context context, List expressions) { + public static List filterUnavailable(Context context, List expressions) { ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - List result = new ArrayList<>(expressions.size()); + List result = new ArrayList<>(expressions.size()); for (ExpressionList.Expression expression : new ExpressionList(context, expressions)) { validateAliasOrIndex(expression); - if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression.get())) { - result.add(expression.expression()); + if (expression.isWildcard() || expression.isExclusion() || ensureAliasOrIndexExists(context, expression)) { + result.add(expression.resolvedExpression()); } } return result; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * This returns `true` if the given {@param resolvedExpression} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unavailable` option is `true`, or, if `false`, it throws a "not found" type of * exception. 
*/ @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { + private static boolean ensureAliasOrIndexExists(Context context, ExpressionList.Expression expression) { + String name = expression.get(); boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); if (indexAbstraction == null) { @@ -1737,32 +1759,37 @@ private static boolean ensureAliasOrIndexExists(Context context, String name) { } private static void validateAliasOrIndex(ExpressionList.Expression expression) { - if (Strings.isEmpty(expression.expression())) { - throw notFoundException(expression.expression()); + if (Strings.isEmpty(expression.resolvedExpression().resource())) { + throw notFoundException(expression.get()); } // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown // if the expression can't be found. 
- if (expression.expression().charAt(0) == '_') { - throw new InvalidIndexNameException(expression.expression(), "must not start with '_'."); + if (expression.resolvedExpression().resource().charAt(0) == '_') { + throw new InvalidIndexNameException(expression.get(), "must not start with '_'."); } } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { + private static void ensureRemoteIndicesRequireIgnoreUnavailable( + IndicesOptions options, + List resolvedExpressions + ) { if (options.ignoreUnavailable()) { return; } - for (String index : indexExpressions) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + var index = resolvedExpression.resource(); if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); + failOnRemoteIndicesNotIgnoringUnavailable(resolvedExpressions); } } } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { + private static void failOnRemoteIndicesNotIgnoringUnavailable(List resolvedExpressions) { List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + String index = resolvedExpression.resource(); if (RemoteClusterAware.isRemoteIndexName(index)) { crossClusterIndices.add(index); } @@ -1780,13 +1807,13 @@ public static final class ExpressionList implements Iterable expressionsList; private final boolean hasWildcard; - public record Expression(String expression, boolean isWildcard, boolean isExclusion) { + public record Expression(ResolvedExpression resolvedExpression, boolean isWildcard, boolean isExclusion) { public String get() { if (isExclusion()) { // drop the leading "-" if exclusion because it is easier for callers to handle it like this - return expression().substring(1); + return resolvedExpression().resource().substring(1); } else { - return expression(); + 
return resolvedExpression().resource(); } } } @@ -1795,16 +1822,17 @@ public String get() { * Creates the expression iterable that can be used to easily check which expression item is a wildcard or an exclusion (or both). * The {@param context} is used to check if wildcards ought to be considered or not. */ - public ExpressionList(Context context, List expressionStrings) { - List expressionsList = new ArrayList<>(expressionStrings.size()); + public ExpressionList(Context context, List resolvedExpressions) { + List expressionsList = new ArrayList<>(resolvedExpressions.size()); boolean wildcardSeen = false; - for (String expressionString : expressionStrings) { + for (ResolvedExpression resolvedExpression : resolvedExpressions) { + var expressionString = resolvedExpression.resource(); boolean isExclusion = expressionString.startsWith("-") && wildcardSeen; if (context.getOptions().expandWildcardExpressions() && isWildcard(expressionString)) { wildcardSeen = true; - expressionsList.add(new Expression(expressionString, true, isExclusion)); + expressionsList.add(new Expression(resolvedExpression, true, isExclusion)); } else { - expressionsList.add(new Expression(expressionString, false, isExclusion)); + expressionsList.add(new Expression(resolvedExpression, false, isExclusion)); } } this.expressionsList = expressionsList; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 706f788e8a310..2dc5e7c28ad0b 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -38,6 +38,7 @@ import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import 
org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; @@ -1713,7 +1714,7 @@ interface IndexDeletionAllowedPredicate { IndexSettings indexSettings) -> canDeleteIndexContents(index); private final IndexDeletionAllowedPredicate ALWAYS_TRUE = (Index index, IndexSettings indexSettings) -> true; - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { /* Being static, parseAliasFilter doesn't have access to whatever guts it needs to parse a query. Instead of passing in a bunch * of dependencies we pass in a function that can perform the parsing. */ CheckedFunction filterParser = bytes -> { diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index be96b4e25d841..3a900a8a9b8a6 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; @@ -1618,7 +1619,7 @@ public boolean isForceExecution() { } } - public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { + public AliasFilter buildAliasFilter(ClusterState state, String index, Set resolvedExpressions) { return indicesService.buildAliasFilter(state, index, resolvedExpressions); } diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java index 834bacd9e6a04..1faeabb6acbf7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -229,9 +230,19 @@ public void testResolveHiddenProperlyWithDateMath() { .metadata(buildMetadata(new Object[][] {}, indices)) .build(); String[] requestedIndex = new String[] { "" }; - Set resolvedIndices = resolver.resolveExpressions(clusterState, IndicesOptions.LENIENT_EXPAND_OPEN, true, requestedIndex); + Set resolvedIndices = resolver.resolveExpressions( + clusterState, + IndicesOptions.LENIENT_EXPAND_OPEN, + true, + requestedIndex + ); assertThat(resolvedIndices.size(), is(1)); - assertThat(resolvedIndices, contains(oneOf("logs-pgsql-prod-" + todaySuffix, "logs-pgsql-prod-" + tomorrowSuffix))); + assertThat( + resolvedIndices, + contains( + oneOf(new ResolvedExpression("logs-pgsql-prod-" + todaySuffix), new ResolvedExpression("logs-pgsql-prod-" + tomorrowSuffix)) + ) + ); } public void testSystemIndexAccess() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..fe0b7926229cb 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -26,7 +27,6 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Locale; @@ -52,11 +52,11 @@ private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { public void testNormal() throws Exception { int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); + List indexExpressions = new ArrayList<>(numIndexExpressions); for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); + indexExpressions.add(new ResolvedExpression(randomAlphaOfLength(10))); } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(indexExpressions.size())); for (int i = 0; i < indexExpressions.size(); i++) { assertThat(result.get(i), equalTo(indexExpressions.get(i))); @@ -64,25 +64,25 @@ public void testNormal() throws Exception { } public void testExpression() throws Exception { - List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List indexExpressions = 
resolvedExpressions("<.marvel-{now}>", "<.watch_history-{now}>", ""); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(1).resource(), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2).resource(), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = Arrays.asList( + List indexExpressions = resolvedExpressions( "<-before-inner-{now}>", "-", "", "<-after-inner-{now}>", "-" ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); + List result = DateMathExpressionResolver.resolve(context, indexExpressions); assertThat( - result, + result.stream().map(ResolvedExpression::resource).toList(), Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion @@ -98,7 +98,7 @@ public void testExpressionWithWildcardAndExclusions() { ); result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); assertThat( - result, + result.stream().map(ResolvedExpression::resource).toList(), Matchers.contains( equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), // doesn't evaluate because it 
doesn't start with "<" and there can't be exclusions without wildcard expansion @@ -112,21 +112,24 @@ public void testExpressionWithWildcardAndExclusions() { } public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); + List result = DateMathExpressionResolver.resolve(context, List.of()); assertThat(result.size(), equalTo(0)); } public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); + List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-test>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + assertThat(result.get(0).resource(), equalTo(".marvel-test")); } public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); + List result = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.text1-{now/d}-text2-{now/M}>") + ); assertThat(result.size(), equalTo(1)); assertThat( - result.get(0), + result.get(0).resource(), equalTo( ".text1-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) @@ -137,33 +140,42 @@ public void testExpression_MultiParts() throws Exception { } public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); + List results = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd}}>") + ); assertThat(results.size(), equalTo(1)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeStatic() 
throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); + List result = DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar\\{v\\}el-{now/d}>")); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); } public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); + List result = DateMathExpressionResolver.resolve( + context, + resolvedExpressions("<.marvel-{now/d{'\\{year\\}'yyyy}}>") + ); assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + assertThat(result.get(0).resource(), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); } public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( + List result = DateMathExpressionResolver.resolve( context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") + resolvedExpressions("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") ); assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + assertThat(result.get(0).resource(), equalTo("name1")); + assertThat(result.get(1).resource(), equalTo(".marvel-" + 
formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + assertThat(result.get(2).resource(), equalTo("name2")); + assertThat( + result.get(3).resource(), + equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))) + ); } public void testExpression_CustomTimeZoneInIndexName() throws Exception { @@ -202,19 +214,19 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { name -> false, name -> false ); - List results = DateMathExpressionResolver.resolve( + List results = DateMathExpressionResolver.resolve( context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + resolvedExpressions("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") ); assertThat(results.size(), equalTo(1)); logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + assertThat(results.get(0).resource(), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } public void testExpressionInvalidUnescaped() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.mar}vel-{now/d}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); @@ -223,7 +235,7 @@ public void testExpressionInvalidUnescaped() throws Exception { public void testExpressionInvalidDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}>")) ); 
assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); @@ -232,7 +244,7 @@ public void testExpressionInvalidDateMathFormat() throws Exception { public void testExpressionInvalidEmptyDateMathFormat() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d{}}>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); @@ -241,10 +253,13 @@ public void testExpressionInvalidEmptyDateMathFormat() throws Exception { public void testExpressionInvalidOpenEnded() throws Exception { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolve(context, resolvedExpressions("<.marvel-{now/d>")) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java index 1ca59ff402bd8..1df3bf4132b60 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ExpressionListTests.java @@ -13,10 +13,12 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ExpressionList.Expression; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; @@ -39,10 +41,13 @@ public void testEmpty() { public void testExplicitSingleNameExpression() { for (IndicesOptions indicesOptions : List.of(getExpandWildcardsIndicesOptions(), getNoExpandWildcardsIndicesOptions())) { for (String expressionString : List.of("non_wildcard", "-non_exclusion")) { - ExpressionList expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); + ExpressionList expressionList = new ExpressionList( + getContextWithOptions(indicesOptions), + resolvedExpressions(expressionString) + ); assertThat(expressionList.hasWildcard(), is(false)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(indicesOptions), List.of(expressionString)); + expressionList = new ExpressionList(getContextWithOptions(indicesOptions), resolvedExpressions((expressionString))); } Iterator expressionIterator = expressionList.iterator(); 
assertThat(expressionIterator.hasNext(), is(true)); @@ -62,11 +67,14 @@ public void testWildcardSingleExpression() { for (String wildcardTest : List.of("*", "a*", "*b", "a*b", "a-*b", "a*-b", "-*", "-a*", "-*b", "**", "*-*")) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), - List.of(wildcardTest) + resolvedExpressions(wildcardTest) ); assertThat(expressionList.hasWildcard(), is(true)); if (randomBoolean()) { - expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), List.of(wildcardTest)); + expressionList = new ExpressionList( + getContextWithOptions(getExpandWildcardsIndicesOptions()), + resolvedExpressions(wildcardTest) + ); } Iterator expressionIterator = expressionList.iterator(); assertThat(expressionIterator.hasNext(), is(true)); @@ -82,13 +90,13 @@ public void testWildcardSingleExpression() { } public void testWildcardLongerExpression() { - List onlyExplicits = randomList(7, () -> randomAlphaOfLengthBetween(0, 5)); - String wildcard = randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**"); - List expressionList = new ArrayList<>(onlyExplicits.size() + 1); + List onlyExplicits = randomList(7, () -> new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); + ResolvedExpression wildcard = new ResolvedExpression(randomFrom("*", "*b", "-*", "*-", "c*", "a*b", "**")); + List expressionList = new ArrayList<>(onlyExplicits.size() + 1); expressionList.addAll(randomSubsetOf(onlyExplicits)); int wildcardPos = expressionList.size(); expressionList.add(wildcard); - for (String item : onlyExplicits) { + for (ResolvedExpression item : onlyExplicits) { if (expressionList.contains(item) == false) { expressionList.add(item); } @@ -106,18 +114,18 @@ public void testWildcardLongerExpression() { } else { assertThat(expression.isWildcard(), is(true)); } - assertThat(expression.get(), is(expressionList.get(i++))); + assertThat(expression.get(), 
is(expressionList.get(i++).resource())); } } public void testWildcardsNoExclusionExpressions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b"), - List.of("-*", "a", "b*") + for (List wildcardExpression : List.of( + resolvedExpressions("*"), + resolvedExpressions("a", "*"), + resolvedExpressions("-b", "*c"), + resolvedExpressions("-", "a", "c*"), + resolvedExpressions("*", "a*", "*b"), + resolvedExpressions("-*", "a", "b*") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -130,25 +138,25 @@ public void testWildcardsNoExclusionExpressions() { int i = 0; for (Expression expression : expressionList) { assertThat(expression.isExclusion(), is(false)); - if (wildcardExpression.get(i).contains("*")) { + if (wildcardExpression.get(i).resource().contains("*")) { assertThat(expression.isWildcard(), is(true)); } else { assertThat(expression.isWildcard(), is(false)); } - assertThat(expression.get(), is(wildcardExpression.get(i++))); + assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); } } } public void testWildcardExpressionNoExpandOptions() { - for (List wildcardExpression : List.of( - List.of("*"), - List.of("a", "*"), - List.of("-b", "*c"), - List.of("*d", "-"), - List.of("*", "-*"), - List.of("-", "a", "c*"), - List.of("*", "a*", "*b") + for (List wildcardExpression : List.of( + resolvedExpressions("*"), + resolvedExpressions("a", "*"), + resolvedExpressions("-b", "*c"), + resolvedExpressions("*d", "-"), + resolvedExpressions("*", "-*"), + resolvedExpressions("-", "a", "c*"), + resolvedExpressions("*", "a*", "*b") )) { ExpressionList expressionList = new ExpressionList( getContextWithOptions(getNoExpandWildcardsIndicesOptions()), @@ -162,7 +170,7 @@ public void testWildcardExpressionNoExpandOptions() { for (Expression expression : expressionList) { 
assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(wildcardExpression.get(i++))); + assertThat(expression.get(), is(wildcardExpression.get(i++).resource())); } } } @@ -172,17 +180,17 @@ public void testSingleExclusionExpression() { int wildcardPos = randomIntBetween(0, 3); String exclusion = randomFrom("-*", "-", "-c*", "-ab", "--"); int exclusionPos = randomIntBetween(wildcardPos + 1, 7); - List exclusionExpression = new ArrayList<>(); + List exclusionExpression = new ArrayList<>(); for (int i = 0; i < wildcardPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } - exclusionExpression.add(wildcard); + exclusionExpression.add(new ResolvedExpression(wildcard)); for (int i = wildcardPos + 1; i < exclusionPos; i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } - exclusionExpression.add(exclusion); + exclusionExpression.add(new ResolvedExpression(exclusion)); for (int i = 0; i < randomIntBetween(0, 3); i++) { - exclusionExpression.add(randomAlphaOfLengthBetween(0, 5)); + exclusionExpression.add(new ResolvedExpression(randomAlphaOfLengthBetween(0, 5))); } ExpressionList expressionList = new ExpressionList(getContextWithOptions(getExpandWildcardsIndicesOptions()), exclusionExpression); if (randomBoolean()) { @@ -193,28 +201,28 @@ public void testSingleExclusionExpression() { if (i == wildcardPos) { assertThat(expression.isWildcard(), is(true)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); } else if (i == exclusionPos) { assertThat(expression.isExclusion(), is(true)); - assertThat(expression.isWildcard(), 
is(exclusionExpression.get(i).contains("*"))); - assertThat(expression.get(), is(exclusionExpression.get(i++).substring(1))); + assertThat(expression.isWildcard(), is(exclusionExpression.get(i).resource().contains("*"))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource().substring(1))); } else { assertThat(expression.isWildcard(), is(false)); assertThat(expression.isExclusion(), is(false)); - assertThat(expression.get(), is(exclusionExpression.get(i++))); + assertThat(expression.get(), is(exclusionExpression.get(i++).resource())); } } } public void testExclusionsExpression() { - for (Tuple, List> exclusionExpression : List.of( - new Tuple<>(List.of("-a", "*", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("-b*", "c", "-a"), List.of(false, false, true)), - new Tuple<>(List.of("*d", "-", "*b"), List.of(false, true, false)), - new Tuple<>(List.of("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), - new Tuple<>(List.of("*-", "-*", "a", "-b"), List.of(false, true, false, true)), - new Tuple<>(List.of("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), - new Tuple<>(List.of("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) + for (Tuple, List> exclusionExpression : List.of( + new Tuple<>(resolvedExpressions("-a", "*", "-a"), List.of(false, false, true)), + new Tuple<>(resolvedExpressions("-b*", "c", "-a"), List.of(false, false, true)), + new Tuple<>(resolvedExpressions("*d", "-", "*b"), List.of(false, true, false)), + new Tuple<>(resolvedExpressions("-", "--", "-*", "", "-*"), List.of(false, false, false, false, true)), + new Tuple<>(resolvedExpressions("*-", "-*", "a", "-b"), List.of(false, true, false, true)), + new Tuple<>(resolvedExpressions("a", "-b", "-*", "-b", "*", "-b"), List.of(false, false, false, true, false, true)), + new Tuple<>(resolvedExpressions("-a", "*d", "-a", "-*b", "-b", "--"), List.of(false, false, true, true, true, true)) )) 
{ ExpressionList expressionList = new ExpressionList( getContextWithOptions(getExpandWildcardsIndicesOptions()), @@ -227,11 +235,11 @@ public void testExclusionsExpression() { for (Expression expression : expressionList) { boolean isExclusion = exclusionExpression.v2().get(i); assertThat(expression.isExclusion(), is(isExclusion)); - assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).contains("*"))); + assertThat(expression.isWildcard(), is(exclusionExpression.v1().get(i).resource().contains("*"))); if (isExclusion) { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++).substring(1))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource().substring(1))); } else { - assertThat(expression.get(), is(exclusionExpression.v1().get(i++))); + assertThat(expression.get(), is(exclusionExpression.v1().get(i++).resource())); } } } @@ -306,4 +314,8 @@ private Context getContextWithOptions(IndicesOptions indicesOptions) { when(context.getOptions()).thenReturn(indicesOptions); return context; } + + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 5f55d203e00e4..bddbe259e0ef3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -1580,16 +1581,27 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), + Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), + indexNameExpressionResolver.resolveExpressions(state, "alias-*") + ); + assertEquals( + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") ); assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("test-1"), + new ResolvedExpression("alias-0"), + new 
ResolvedExpression("alias-1") + ), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals( + Set.of(new ResolvedExpression("test-1"), new ResolvedExpression("alias-1")), + indexNameExpressionResolver.resolveExpressions(state, "*-1") + ); } public void testFilteringAliases() { @@ -1598,16 +1610,25 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set resolvedExpressions = Set.of(new ResolvedExpression("alias-0"), new ResolvedExpression("alias-1")); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("alias-0"), + new ResolvedExpression("alias-1") + ); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of( + new ResolvedExpression("test-0"), + new ResolvedExpression("test-1"), + new ResolvedExpression("alias-0"), + new ResolvedExpression("alias-1") + ); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1621,7 +1642,7 @@ public void testIndexAliases() { .putAlias(AliasMetadata.builder("test-alias-non-filtering")) ); ClusterState state = ClusterState.builder(new 
ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "test-*"); String[] strings = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); Arrays.sort(strings); @@ -1656,28 +1677,28 @@ public void testIndexAliasesDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { // Only resolve aliases with with that refer to dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar")); } { // Only resolve aliases with with that refer to dataStreamName2 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, true, resolvedExpressions); assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2")); } { // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex2.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases(state, index, x -> true, x -> true, false, 
resolvedExpressions); assertThat(result, nullValue()); } { // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1 - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "l*"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1691,7 +1712,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned, because an unfiltered alias is targeting the same data stream - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, "logs_bar", "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1705,7 +1726,7 @@ public void testIndexAliasesDataStreamAliases() { } { // The filtered alias is returned because although we target the data stream name, skipIdentity is true - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1719,7 +1740,7 @@ public void testIndexAliasesDataStreamAliases() { } { // Null is returned because we target the data stream name and skipIdentity is false - Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); + Set resolvedExpressions = indexNameExpressionResolver.resolveExpressions(state, dataStreamName1, "logs"); String index = backingIndex1.getIndex().getName(); String[] result = indexNameExpressionResolver.indexAliases( state, @@ -1742,13 +1763,13 @@ public 
void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); + Set resolvedExpressions = Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); assertArrayEquals(new String[] { "test-alias" }, aliases); - resolvedExpressions = Collections.singleton("other-alias"); + resolvedExpressions = Collections.singleton(new ResolvedExpression("other-alias")); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertArrayEquals(new String[] { "other-alias" }, aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1790,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1872,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1910,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of(new 
ResolvedExpression("test-0"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1946,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1987,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias")) ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..25ed5fb2bdab2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; @@ -20,13 +21,13 @@ import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; +import 
java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -50,50 +51,52 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX"))), + equalTo(resolvedExpressionsSet("testXXX")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "ku*"))), + equalTo(resolvedExpressionsSet("testXXX", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "kuku"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY", "kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*", "-kuku"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet( IndexNameExpressionResolver.WildcardExpressionResolver.resolve( context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") + resolvedExpressions("testX*", "-doe", "-testXXX", "-testYYY") ) ), - equalTo(newHashSet("testXYY")) + equalTo(resolvedExpressionsSet("testXYY")) ); if (indicesOptions == IndicesOptions.lenientExpandOpen()) { assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - 
equalTo(newHashSet("testXXX", "-testXXX")) + newHashSet( + IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testXXX")) + ), + equalTo(resolvedExpressionsSet("testXXX", "-testXXX")) ); } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { IndexNotFoundException infe = expectThrows( @@ -103,8 +106,8 @@ public void testConvertWildcardsJustIndicesTests() { assertEquals("-testXXX", infe.getIndex().getName()); } assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testXXX", "-testX*"))), + equalTo(resolvedExpressionsSet("testXXX")) ); } @@ -122,24 +125,24 @@ public void testConvertWildcardsTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYY*", "alias*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("-kuku"))), + equalTo(resolvedExpressionsSet("-kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*", "-testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", 
"testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*", "testYYY"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testYYY", "testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); } @@ -159,8 +162,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -168,8 +171,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -177,8 +180,8 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), - equalTo(newHashSet("testXXX", "testXXY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("testX*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY")) ); context = new IndexNameExpressionResolver.Context( state, @@ -217,28 +220,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), - equalTo(newHashSet("testXXX", "testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*"))), + equalTo(resolvedExpressionsSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), - equalTo(newHashSet("testXXY", "testXYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*X*Y"))), + equalTo(resolvedExpressionsSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), - equalTo(newHashSet("kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("kuku*Y*"))), + equalTo(resolvedExpressionsSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), - equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*"))), + equalTo(resolvedExpressionsSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("test*Y*X"))).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, resolvedExpressions("*Y*X"))).size(), equalTo(0) ); } @@ -257,11 +259,11 @@ public void testAll() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); assertThat( newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) + equalTo(resolvedExpressionsSet("testXXX", "testXYY", "testYYY")) ); IndicesOptions noExpandOptions = IndicesOptions.fromOptions( randomBoolean(), @@ -298,7 +300,7 @@ public void testAllAliases() { IndicesOptions.lenientExpandOpen(), // don't include hidden SystemIndexAccessLevel.NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); } { @@ -319,7 +321,7 @@ public void testAllAliases() { ); assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - equalTo(newHashSet("index-visible-alias")) + equalTo(resolvedExpressionsSet("index-visible-alias")) ); } } @@ -362,7 +364,7 @@ public void testAllDataStreams() { assertThat( newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), - 
equalTo(newHashSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) + equalTo(resolvedExpressionsSet(DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis))) ); } @@ -385,7 +387,7 @@ public void testAllDataStreams() { NONE ); - assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet())); + assertThat(newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(Set.of())); } } @@ -506,16 +508,16 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ); - assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "bar_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ); assertEquals(0, indices.size()); } @@ -524,45 +526,45 @@ public void testResolveAliases() { IndexNotFoundException.class, () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - Collections.singletonList("foo_a*") + resolvedExpressions("foo_a*") ) ); assertEquals("foo_a*", infe.getIndex().getName()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); + 
assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesStrictContext, - Collections.singletonList("foo*") + resolvedExpressions("foo*") ); - assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( skipAliasesLenientContext, - Collections.singletonList("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } { IllegalArgumentException iae = expectThrows( @@ -581,11 +583,11 @@ public void testResolveAliases() { SystemIndexAccessLevel.NONE ); { - Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( noExpandNoAliasesContext, - List.of("foo_alias") + resolvedExpressions("foo_alias") ); - assertThat(indices, containsInAnyOrder("foo_alias")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_alias"))); } IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( false, @@ -654,18 +656,18 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); - assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("foo_index", "foo_foo", "bar_index"))); // data streams are not included and expression doesn't match the data steram indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAndAliasesContext, - Collections.singletonList("bar_*") + resolvedExpressions("bar_*") ); - assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); + assertThat(newHashSet(indices), equalTo(resolvedExpressionsSet("bar_bar", "bar_index"))); } { @@ -691,35 +693,39 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - 
DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + resolvedExpressions("*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); } @@ -748,35 +754,39 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + resolvedExpressions("foo_*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) 
+ ) ) ); // include all wildcard adds the data stream's backing indices indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + resolvedExpressions("*") ); assertThat( - indices, - containsInAnyOrder( - "foo_index", - "bar_index", - "foo_foo", - "bar_bar", - DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), - DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + newHashSet(indices), + equalTo( + resolvedExpressionsSet( + "foo_index", + "bar_index", + "foo_foo", + "bar_bar", + DataStream.getDefaultBackingIndexName("foo_logs", 1, epochMillis), + DataStream.getDefaultBackingIndexName("foo_logs", 2, epochMillis) + ) ) ); } @@ -808,16 +818,28 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); - assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); - assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); - assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); - assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("*")) 
+ ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of(new ResolvedExpression("*"))); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("bar_bar", "foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("foo*")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index", "bar_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + onlyIndicesContext, + List.of(new ResolvedExpression("foo*")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_foo", "foo_index"))); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indicesAndAliasesContext, + List.of(new ResolvedExpression("foo_alias")) + ); + assertThat(newHashSet(matches), equalTo(resolvedExpressionsSet("foo_alias"))); IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") @@ -840,8 +862,19 @@ private static IndexMetadata.Builder indexBuilder(String index) { private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { IndexNotFoundException infe = expectThrows( IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) + () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + context, + List.of(new ResolvedExpression(wildcardExpression)) + ) ); assertEquals(wildcardExpression, infe.getIndex().getName()); } + + private List resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).toList(); + } + + private Set resolvedExpressionsSet(String... expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); + } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 36f7355a541c1..17975b7d18dd8 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -77,6 +78,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; +import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -677,27 +679,27 @@ public void testBuildAliasFilter() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0")); + AliasFilter result = 
indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-1")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bax"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-0", Set.of("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-0", resolvedExpressions("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -706,7 +708,7 @@ public void testBuildAliasFilter() { assertThat(filter.should(), containsInAnyOrder(QueryBuilders.termQuery("foo", "baz"), QueryBuilders.termQuery("foo", "bar"))); } { - AliasFilter result = indicesService.buildAliasFilter(state, "test-1", Set.of("test-alias-0", "test-alias-1")); + AliasFilter result = indicesService.buildAliasFilter(state, "test-1", resolvedExpressions("test-alias-0", "test-alias-1")); assertThat(result.getAliases(), arrayContainingInAnyOrder("test-alias-0", "test-alias-1")); BoolQueryBuilder filter = 
(BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -718,7 +720,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-0", - Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") + resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -727,7 +729,7 @@ public void testBuildAliasFilter() { AliasFilter result = indicesService.buildAliasFilter( state, "test-1", - Set.of("test-alias-0", "test-alias-1", "test-alias-non-filtering") + resolvedExpressions("test-alias-0", "test-alias-1", "test-alias-non-filtering") ); assertThat(result.getAliases(), emptyArray()); assertThat(result.getQueryBuilder(), nullValue()); @@ -754,19 +756,19 @@ public void testBuildAliasFilterDataStreamAliases() { ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "bar"))); } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo")); assertThat(result.getQueryBuilder(), equalTo(QueryBuilders.termQuery("foo", "baz"))); } { String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); + AliasFilter result 
= indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -776,7 +778,7 @@ public void testBuildAliasFilterDataStreamAliases() { } { String index = backingIndex2.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs")); assertThat(result.getAliases(), arrayContainingInAnyOrder("logs_foo", "logs")); BoolQueryBuilder filter = (BoolQueryBuilder) result.getQueryBuilder(); assertThat(filter.filter(), empty()); @@ -787,13 +789,13 @@ public void testBuildAliasFilterDataStreamAliases() { { // querying an unfiltered and a filtered alias for the same data stream should drop the filters String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs_foo", "logs", "logs_bar")); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs_foo", "logs", "logs_bar")); assertThat(result, is(AliasFilter.EMPTY)); } { // similarly, querying the data stream name and a filtered alias should drop the filter String index = backingIndex1.getIndex().getName(); - AliasFilter result = indicesService.buildAliasFilter(state, index, Set.of("logs", dataStreamName1)); + AliasFilter result = indicesService.buildAliasFilter(state, index, resolvedExpressions("logs", dataStreamName1)); assertThat(result, is(AliasFilter.EMPTY)); } } @@ -846,4 +848,8 @@ public void testWithTempIndexServiceHandlesExistingIndex() throws Exception { return null; }); } + + private Set resolvedExpressions(String... 
expressions) { + return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet()); + } } From 8c3d19badc104e5de565548ee53aa6a7745faadf Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 14 Oct 2024 16:27:37 +0200 Subject: [PATCH 16/19] Update IndexSettingProvider#getAdditionalIndexSettings() signature (#114150) With logsdb another index mode is available, the isTimeSeries parameter is limiting. Instead, we should just push down the index mode from template to index settings provider. Follow up from #113451 Relates to #113583 --- .../DataStreamIndexSettingsProvider.java | 9 +-- .../DataStreamIndexSettingsProviderTests.java | 24 +++---- .../TransportSimulateIndexTemplateAction.java | 2 +- .../cluster/metadata/Metadata.java | 17 ----- .../metadata/MetadataCreateIndexService.java | 8 +-- .../MetadataIndexTemplateService.java | 2 +- .../cluster/routing/allocation/DataTier.java | 3 +- .../index/IndexSettingProvider.java | 21 +++--- ...sportSimulateIndexTemplateActionTests.java | 3 +- .../cluster/metadata/MetadataTests.java | 70 +++++++++++++++++-- .../index/IndexSettingProviderTests.java | 2 +- .../LogsdbIndexModeSettingsProvider.java | 2 +- .../SyntheticSourceIndexSettingsProvider.java | 12 ++-- .../LogsdbIndexModeSettingsProviderTests.java | 30 ++++---- ...heticSourceIndexSettingsProviderTests.java | 26 +++---- 15 files changed, 138 insertions(+), 93 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index a3d0347c3d192..d6a0fd86265e5 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -59,7 +59,7 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider { 
public Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + @Nullable IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, @@ -70,15 +70,16 @@ public Settings getAdditionalIndexSettings( // First backing index is created and then data stream is rolled over (in a single cluster state update). // So at this point we can't check index_mode==time_series, // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES + boolean isMigratingToTimeSeries = templateIndexMode == IndexMode.TIME_SERIES; boolean migrating = dataStream != null && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) - && isTimeSeries; + && isMigratingToTimeSeries; IndexMode indexMode; if (migrating) { indexMode = IndexMode.TIME_SERIES; } else if (dataStream != null) { - indexMode = isTimeSeries ? dataStream.getIndexMode() : null; - } else if (isTimeSeries) { + indexMode = isMigratingToTimeSeries ? 
dataStream.getIndexMode() : null; + } else if (isMigratingToTimeSeries) { indexMode = IndexMode.TIME_SERIES; } else { indexMode = null; diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index d8d4a9c03933a..015752724cb5d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -78,7 +78,7 @@ public void testGetAdditionalIndexSettings() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -123,7 +123,7 @@ public void testGetAdditionalIndexSettingsIndexRoutingPathAlreadyDefined() throw Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -193,7 +193,7 @@ public void testGetAdditionalIndexSettingsMappingsMerging() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -218,7 +218,7 @@ public void testGetAdditionalIndexSettingsNoMappings() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -243,7 +243,7 @@ public void testGetAdditionalIndexSettingsLookAheadTime() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, 
metadata, now, settings, @@ -268,7 +268,7 @@ public void testGetAdditionalIndexSettingsLookBackTime() throws Exception { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -299,7 +299,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreated() throws Exce var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -336,7 +336,7 @@ public void testGetAdditionalIndexSettingsDataStreamAlreadyCreatedTimeSettingsMi () -> provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -362,7 +362,7 @@ public void testGetAdditionalIndexSettingsNonTsdbTemplate() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - false, + null, metadata, Instant.ofEpochMilli(1L), settings, @@ -382,7 +382,7 @@ public void testGetAdditionalIndexSettingsMigrateToTsdb() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, @@ -415,7 +415,7 @@ public void testGetAdditionalIndexSettingsDowngradeFromTsdb() { Settings result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 2), dataStreamName, - false, + null, metadata, Instant.ofEpochMilli(1L), settings, @@ -694,7 +694,7 @@ private Settings generateTsdbSettings(String mapping, Instant now) throws IOExce var result = provider.getAdditionalIndexSettings( DataStream.getDefaultBackingIndexName(dataStreamName, 1), dataStreamName, - true, + IndexMode.TIME_SERIES, metadata, now, settings, diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index ec8eb4babfdac..5e3799cd14518 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -274,7 +274,7 @@ public static Template resolveTemplate( Settings result = provider.getAdditionalIndexSettings( indexName, template.getDataStreamTemplate() != null ? indexName : null, - template.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(template), + metadata.retrieveIndexModeFromTemplate(template), simulatedState.getMetadata(), now, templateSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 0756080c16d00..b7777eca86179 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -1316,23 +1316,6 @@ public Map templatesV2() { .orElse(Collections.emptyMap()); } - // TODO: remove this method: - public boolean isTimeSeriesTemplate(ComposableIndexTemplate indexTemplate) { - var indexModeFromTemplate = retrieveIndexModeFromTemplate(indexTemplate); - if (indexModeFromTemplate == IndexMode.TIME_SERIES) { - // No need to check for the existence of index.routing_path here, because index.mode=time_series can't be specified without it. - // Setting validation takes care of this. - // Also no need to validate that the fields defined in index.routing_path are keyword fields with time_series_dimension - // attribute enabled. This is validated elsewhere (DocumentMapper). 
- return true; - } - - // in a followup change: check the existence of keyword fields of type keyword and time_series_dimension attribute enabled in - // the template. In this case the index.routing_path setting can be generated from the mapping. - - return false; - } - public IndexMode retrieveIndexModeFromTemplate(ComposableIndexTemplate indexTemplate) { if (indexTemplate.getDataStreamTemplate() == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 1cebbabde0769..7f2c076281735 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -982,10 +982,10 @@ static Settings aggregateIndexSettings( if (sourceMetadata == null) { final Settings templateAndRequestSettings = Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); - final boolean timeSeriesTemplate = Optional.of(request) + final IndexMode templateIndexMode = Optional.of(request) .map(CreateIndexClusterStateUpdateRequest::matchingTemplate) - .map(metadata::isTimeSeriesTemplate) - .orElse(false); + .map(metadata::retrieveIndexModeFromTemplate) + .orElse(null); // Loop through all the explicit index setting providers, adding them to the // additionalIndexSettings map @@ -995,7 +995,7 @@ static Settings aggregateIndexSettings( var newAdditionalSettings = provider.getAdditionalIndexSettings( request.index(), request.dataStreamName(), - timeSeriesTemplate, + templateIndexMode, currentState.getMetadata(), resolvedAt, templateAndRequestSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 57194ded9422e..ccdfaa5518aee 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -705,7 +705,7 @@ private void validateIndexTemplateV2(String name, ComposableIndexTemplate indexT var newAdditionalSettings = provider.getAdditionalIndexSettings( "validate-index-name", indexTemplate.getDataStreamTemplate() != null ? "validate-data-stream-name" : null, - indexTemplate.getDataStreamTemplate() != null && metadata.isTimeSeriesTemplate(indexTemplate), + metadata.retrieveIndexModeFromTemplate(indexTemplate), currentState.getMetadata(), now, combinedSettings, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java index 3c559f9421a38..4c2f0cbaaf729 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; @@ -226,7 +227,7 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP public Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index aaa4c738c0e13..0180d2c8df119 100644 --- 
a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -30,20 +30,21 @@ public interface IndexSettingProvider { * Returns explicitly set default index {@link Settings} for the given index. This should not * return null. * - * @param indexName The name of the new index being created - * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise - * null - * @param isTimeSeries Whether the template is in time series mode. - * @param metadata The current metadata instance that doesn't yet contain the index to be created - * @param resolvedAt The time the request to create this new index was accepted. - * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings - * defined on the create index request - * @param combinedTemplateMappings All the mappings resolved from the template that matches + * @param indexName The name of the new index being created + * @param dataStreamName The name of the data stream if the index being created is part of a data stream + * otherwise null + * @param templateIndexMode The index mode defined in template if template creates data streams, + * otherwise null is returned. + * @param metadata The current metadata instance that doesn't yet contain the index to be created + * @param resolvedAt The time the request to create this new index was accepted. 
+ * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings + * defined on the create index request + * @param combinedTemplateMappings All the mappings resolved from the template that matches */ Settings getAdditionalIndexSettings( String indexName, @Nullable String dataStreamName, - boolean isTimeSeries, + @Nullable IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java index 8f0ff82beab4b..74408b99e92ce 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/post/TransportSimulateIndexTemplateActionTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; @@ -69,7 +70,7 @@ public void testSettingsProviderIsOverridden() throws Exception { public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean timeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings allSettings, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 00e21603ec8b4..ba1f9f01f49d2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java 
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -2412,30 +2413,87 @@ public void testEnsureMetadataFieldCheckedForGlobalStateChanges() { assertThat(unclassifiedFields, empty()); } - public void testIsTimeSeriesTemplate() throws IOException { - var template = new Template(Settings.builder().put("index.mode", "time_series").build(), new CompressedXContent("{}"), null); + public void testRetrieveIndexModeFromTemplateTsdb() throws IOException { + // tsdb: + var tsdbTemplate = new Template(Settings.builder().put("index.mode", "time_series").build(), new CompressedXContent("{}"), null); // Settings in component template: { - var componentTemplate = new ComponentTemplate(template, null, null); + var componentTemplate = new ComponentTemplate(tsdbTemplate, null, null); var indexTemplate = ComposableIndexTemplate.builder() .indexPatterns(List.of("test-*")) .componentTemplates(List.of("component_template_1")) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); - assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true)); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.TIME_SERIES)); } // Settings in composable index template: { var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); var indexTemplate = ComposableIndexTemplate.builder() .indexPatterns(List.of("test-*")) - .template(template) + .template(tsdbTemplate) .componentTemplates(List.of("component_template_1")) .dataStreamTemplate(new 
ComposableIndexTemplate.DataStreamTemplate()) .build(); Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); - assertThat(m.isTimeSeriesTemplate(indexTemplate), is(true)); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.TIME_SERIES)); + } + } + + public void testRetrieveIndexModeFromTemplateLogsdb() throws IOException { + // logsdb: + var logsdbTemplate = new Template(Settings.builder().put("index.mode", "logsdb").build(), new CompressedXContent("{}"), null); + // Settings in component template: + { + var componentTemplate = new ComponentTemplate(logsdbTemplate, null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.LOGSDB)); + } + // Settings in composable index template: + { + var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template(logsdbTemplate) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), is(IndexMode.LOGSDB)); + } + } + + public void testRetrieveIndexModeFromTemplateEmpty() throws IOException { + // no index mode: + var emptyTemplate = new Template(Settings.EMPTY, new CompressedXContent("{}"), null); + // Settings in component template: + { + var 
componentTemplate = new ComponentTemplate(emptyTemplate, null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), nullValue()); + } + // Settings in composable index template: + { + var componentTemplate = new ComponentTemplate(new Template(null, null, null), null, null); + var indexTemplate = ComposableIndexTemplate.builder() + .indexPatterns(List.of("test-*")) + .template(emptyTemplate) + .componentTemplates(List.of("component_template_1")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata m = Metadata.builder().put("component_template_1", componentTemplate).put("index_template_1", indexTemplate).build(); + assertThat(m.retrieveIndexModeFromTemplate(indexTemplate), nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java index 387340c0a6f50..628de0b047bf5 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingProviderTests.java @@ -79,7 +79,7 @@ static class TestIndexSettingsProvider implements IndexSettingProvider { public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java 
b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index b463426de0848..ee9d6129dcd54 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -42,7 +42,7 @@ void updateClusterIndexModeLogsdbEnabled(boolean isLogsdbEnabled) { public Settings getAdditionalIndexSettings( final String indexName, final String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, final Metadata metadata, final Instant resolvedAt, final Settings settings, diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index 6e139cc3ce9e6..a190ff72de8df 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -50,7 +50,7 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider public Settings getAdditionalIndexSettings( String indexName, String dataStreamName, - boolean isTimeSeries, + IndexMode templateIndexMode, Metadata metadata, Instant resolvedAt, Settings indexTemplateAndCreateRequestSettings, @@ -59,7 +59,7 @@ public Settings getAdditionalIndexSettings( // This index name is used when validating component and index templates, we should skip this check in that case. // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) 
method) boolean isTemplateValidation = "validate-index-name".equals(indexName); - if (newIndexHasSyntheticSourceUsage(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) + if (newIndexHasSyntheticSourceUsage(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings, combinedTemplateMappings) && syntheticSourceLicenseService.fallbackToStoredSource(isTemplateValidation)) { LOGGER.debug("creation of index [{}] with synthetic source without it being allowed", indexName); // TODO: handle falling back to stored source @@ -69,7 +69,7 @@ public Settings getAdditionalIndexSettings( boolean newIndexHasSyntheticSourceUsage( String indexName, - boolean isTimeSeries, + IndexMode templateIndexMode, Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { @@ -80,7 +80,7 @@ boolean newIndexHasSyntheticSourceUsage( } try { - var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, isTimeSeries, indexTemplateAndCreateRequestSettings); + var tmpIndexMetadata = buildIndexMetadataForMapperService(indexName, templateIndexMode, indexTemplateAndCreateRequestSettings); try (var mapperService = mapperServiceFactory.apply(tmpIndexMetadata)) { // combinedTemplateMappings can be null when creating system indices // combinedTemplateMappings can be empty when creating a normal index that doesn't match any template and without mapping. 
@@ -101,7 +101,7 @@ boolean newIndexHasSyntheticSourceUsage( // Create a dummy IndexMetadata instance that can be used to create a MapperService in order to check whether synthetic source is used: private IndexMetadata buildIndexMetadataForMapperService( String indexName, - boolean isTimeSeries, + IndexMode templateIndexMode, Settings indexTemplateAndCreateRequestSettings ) { var tmpIndexMetadata = IndexMetadata.builder(indexName); @@ -119,7 +119,7 @@ private IndexMetadata buildIndexMetadataForMapperService( .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()); - if (isTimeSeries) { + if (templateIndexMode == IndexMode.TIME_SERIES) { finalResolvedSettings.put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES); // Avoid failing because index.routing_path is missing (in case fields are marked as dimension) finalResolvedSettings.putList(INDEX_ROUTING_PATH.getKey(), List.of("path")); diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index 04e89af254f64..5f23dbdca1143 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -51,7 +51,7 @@ public void testLogsDbDisabled() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -69,7 +69,7 @@ public void testOnIndexCreation() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( "logs-apache-production", null, - false, + null, Metadata.EMPTY_METADATA, 
Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -87,7 +87,7 @@ public void testOnExplicitStandardIndex() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.STANDARD.getName()).build(), @@ -105,7 +105,7 @@ public void testOnExplicitTimeSeriesIndex() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.getName()).build(), @@ -123,7 +123,7 @@ public void testNonLogsDataStream() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -141,7 +141,7 @@ public void testWithoutLogsComponentTemplate() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of()), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -159,7 +159,7 @@ public void testWithLogsComponentTemplate() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -177,7 +177,7 @@ public void testWithMultipleComponentTemplates() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), 
List.of("logs@settings", "logs@custom")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -195,7 +195,7 @@ public void testWithCustomComponentTemplatesOnly() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@custom", "custom-component-template")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -213,7 +213,7 @@ public void testNonMatchingTemplateIndexPattern() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("standard-apache-production"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -231,7 +231,7 @@ public void testCaseSensitivity() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "LOGS-apache-production", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -249,7 +249,7 @@ public void testMultipleHyphensInDataStreamName() throws IOException { final Settings additionalIndexSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production-eu", - false, + null, Metadata.EMPTY_METADATA, Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -267,7 +267,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings beforeSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -281,7 +281,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings afterSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), 
List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, @@ -295,7 +295,7 @@ public void testBeforeAndAFterSettingUpdate() throws IOException { final Settings laterSettings = provider.getAdditionalIndexSettings( null, "logs-apache-production", - false, + null, buildMetadata(List.of("*"), List.of("logs@settings")), Instant.now().truncatedTo(ChronoUnit.SECONDS), Settings.EMPTY, diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index c97328da132bd..738487b9365a7 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -49,7 +49,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { @@ -82,7 +82,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } """; } - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } } @@ -104,7 +104,7 @@ public void testValidateIndexName() throws IOException { } """; Settings settings = Settings.EMPTY; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = 
provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } @@ -124,22 +124,22 @@ public void testNewIndexHasSyntheticSourceUsageLogsdbIndex() throws IOException """; { Settings settings = Settings.builder().put("index.mode", "logsdb").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { Settings settings = Settings.builder().put("index.mode", "logsdb").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of()); assertTrue(result); } { - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, Settings.EMPTY, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, Settings.EMPTY, List.of()); assertFalse(result); } { boolean result = provider.newIndexHasSyntheticSourceUsage( indexName, - false, + null, Settings.EMPTY, List.of(new CompressedXContent(mapping)) ); @@ -164,22 +164,22 @@ public void testNewIndexHasSyntheticSourceUsageTimeSeries() throws IOException { """; { Settings settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "my_field").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertTrue(result); } { Settings settings = Settings.builder().put("index.mode", "time_series").put("index.routing_path", "my_field").build(); - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, 
settings, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of()); assertTrue(result); } { - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, Settings.EMPTY, List.of()); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, Settings.EMPTY, List.of()); assertFalse(result); } { boolean result = provider.newIndexHasSyntheticSourceUsage( indexName, - false, + null, Settings.EMPTY, List.of(new CompressedXContent(mapping)) ); @@ -206,7 +206,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } { @@ -221,7 +221,7 @@ public void testNewIndexHasSyntheticSourceUsage_invalidSettings() throws IOExcep } } """; - boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, false, settings, List.of(new CompressedXContent(mapping))); + boolean result = provider.newIndexHasSyntheticSourceUsage(indexName, null, settings, List.of(new CompressedXContent(mapping))); assertFalse(result); } } From b78cf6cd1e306363243087bf2ddf6ffb8b666ef8 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 14 Oct 2024 15:28:39 +0100 Subject: [PATCH 17/19] Fix Max Score Propagation in RankDocsQuery (#114716) Fix rank doc query when some segments have no ranked docs --- .../search/retriever/rankdoc/RankDocsQuery.java | 9 ++++----- .../rankdoc/RankDocsQueryBuilderTests.java | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index fb5015a82dbdb..b78d9e40ba120 100644 --- 
a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -107,11 +107,10 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio @Override public Scorer scorer(LeafReaderContext context) { - // Segment starts indicate how many docs are in the segment, - // upper equalling lower indicates no documents for this segment - if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { - return null; - } + /** + * We return a scorer even if there are no ranked documents within the segment. + * This ensures the correct propagation of the maximum score. + */ return new Scorer(this) { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; diff --git a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java index ca05c57b7d733..b295b78453f93 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java @@ -195,6 +195,23 @@ public void testRankDocsQueryEarlyTerminate() throws IOException { assertThat(col.totalHits.value, equalTo((long) topSize)); assertEqualTopDocs(col.scoreDocs, rankDocs); } + + { + // A single rank doc in the last segment + RankDoc[] singleRankDoc = new RankDoc[1]; + singleRankDoc[0] = rankDocs[rankDocs.length - 1]; + RankDocsQuery q = new RankDocsQuery( + reader, + singleRankDoc, + new Query[] { NumericDocValuesField.newSlowExactQuery("active", 1) }, + new String[1], + false + ); + var topDocsManager = new TopScoreDocCollectorManager(1, null, 0); + var col = searcher.search(q, topDocsManager); + assertThat(col.totalHits.value, lessThanOrEqualTo((long) (2 + rankDocs.length))); + 
assertEqualTopDocs(col.scoreDocs, singleRankDoc); + } } } } From e1451997b1bbbf3f22b4b552352e65dc62e6a947 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Mon, 14 Oct 2024 10:51:50 -0400 Subject: [PATCH 18/19] [ML] Switch default chunking strategy to sentence (#114453) --- docs/changelog/114453.yaml | 5 +++++ .../xpack/inference/chunking/ChunkingSettingsBuilder.java | 2 +- .../inference/chunking/ChunkingSettingsBuilderTests.java | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/114453.yaml diff --git a/docs/changelog/114453.yaml b/docs/changelog/114453.yaml new file mode 100644 index 0000000000000..0d5345ad9d2a6 --- /dev/null +++ b/docs/changelog/114453.yaml @@ -0,0 +1,5 @@ +pr: 114453 +summary: Switch default chunking strategy to sentence +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java index 477c3ea6352f5..20520ca829297 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilder.java @@ -13,7 +13,7 @@ import java.util.Map; public class ChunkingSettingsBuilder { - public static final WordBoundaryChunkingSettings DEFAULT_SETTINGS = new WordBoundaryChunkingSettings(250, 100); + public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); public static ChunkingSettings fromMap(Map settings) { if (settings.isEmpty()) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java 
index 3c09984ac0162..5b9625073e6c6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/ChunkingSettingsBuilderTests.java @@ -17,7 +17,7 @@ public class ChunkingSettingsBuilderTests extends ESTestCase { - public static final WordBoundaryChunkingSettings DEFAULT_SETTINGS = new WordBoundaryChunkingSettings(250, 100); + public static final SentenceBoundaryChunkingSettings DEFAULT_SETTINGS = new SentenceBoundaryChunkingSettings(250, 1); public void testEmptyChunkingSettingsMap() { ChunkingSettings chunkingSettings = ChunkingSettingsBuilder.fromMap(Collections.emptyMap()); From 0c02c2b66367713bc29cc38a983669c76865c426 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:16:12 +0200 Subject: [PATCH 19/19] Don't close/recreate adaptive allocations metrics (#114721) --- .../AdaptiveAllocationsScalerService.java | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 193fa9e7e07f9..8f43044a465c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -171,17 +171,6 @@ Collection observeDouble(Function