From 4c556fcdff64fc89a5619d7591a4d1aa68f4c34c Mon Sep 17 00:00:00 2001 From: Julia Bardi <90178898+juliaElastic@users.noreply.github.com> Date: Thu, 28 Mar 2024 11:30:05 +0100 Subject: [PATCH 01/69] [Fleet] Added all privilege to kibana_system to logs-fleet_server.* index pattern (#106815) * Update KibanaOwnedReservedRoleDescriptors.java * replaced all with read, delete_index --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 46e4a5cec2486..3c76734b794d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -198,7 +198,7 @@ static RoleDescriptor kibanaSystem(String name) { // Fleet publishes Agent metrics in kibana task runner RoleDescriptor.IndicesPrivileges.builder().indices("metrics-fleet_server*").privileges("all").build(), // Fleet reads output health from this index pattern - RoleDescriptor.IndicesPrivileges.builder().indices("logs-fleet_server*").privileges("read").build(), + RoleDescriptor.IndicesPrivileges.builder().indices("logs-fleet_server*").privileges("read", "delete_index").build(), // Legacy "Alerts as data" used in Security Solution. // Kibana user creates these indices; reads / writes to them. 
RoleDescriptor.IndicesPrivileges.builder().indices(ReservedRolesStore.ALERTS_LEGACY_INDEX).privileges("all").build(), From 80094b2c3a790e5dcbee964695af649ddf16d545 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Thu, 28 Mar 2024 12:34:49 +0100 Subject: [PATCH 02/69] Unmute {p0=data_stream/10_basic/Delete data stream with failure stores} (#106865) --- .../resources/rest-api-spec/test/data_stream/10_basic.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 22b541425b74f..23f8715b5787f 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -538,10 +538,8 @@ setup: --- "Delete data stream with failure stores": - skip: - # version: " - 8.11.99" - # reason: "data streams only supported in 8.12+" - version: all - reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/104348 + version: " - 8.11.99" + reason: "data stream failure stores only supported in 8.12+" - do: allowed_warnings: From 9e06cbfe0c08eaeeaa1f12c456fb1719f99d390d Mon Sep 17 00:00:00 2001 From: QY Date: Thu, 28 Mar 2024 21:01:03 +0800 Subject: [PATCH 03/69] Fix `noop_update_total` is not being updated when using the `_bulk` (#105745) Closes #105742 --- docs/changelog/105745.yaml | 6 +++ .../rest-api-spec/test/bulk/12_noop.yml | 49 +++++++++++++++++++ .../action/bulk/TransportShardBulkAction.java | 1 + 3 files changed, 56 insertions(+) create mode 100644 docs/changelog/105745.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/12_noop.yml diff --git a/docs/changelog/105745.yaml b/docs/changelog/105745.yaml new file mode 100644 index 
0000000000000..e9a61f692d94d --- /dev/null +++ b/docs/changelog/105745.yaml @@ -0,0 +1,6 @@ +pr: 105745 +summary: Fix `noop_update_total` is not being updated when using the `_bulk` +area: CRUD +type: bug +issues: + - 105742 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/12_noop.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/12_noop.yml new file mode 100644 index 0000000000000..d0efcc0b67006 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/12_noop.yml @@ -0,0 +1,49 @@ +--- +"Noop": + - skip: + version: " - 8.13.99" + reason: fixed in 8.14.0 + - do: + indices.create: + index: test_1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + - do: + cluster.health: + wait_for_status: green + - do: + bulk: + refresh: true + body: | + { "create": { "_index": "test_1", "_id": "1"} } + { "foo": "bar" } + { "create": { "_index": "test_1", "_id": "2"} } + { "foo": "bar" } + - do: + indices.stats: { index: test_1 } + + - match: { indices.test_1.total.indexing.index_total: 2 } + - match: { indices.test_1.primaries.indexing.index_total: 2 } + - match: { indices.test_1.total.indexing.noop_update_total: 0 } + - match: { indices.test_1.primaries.indexing.noop_update_total: 0 } + + - do: + bulk: + body: | + { "update": { "_index": "test_1", "_id": "1"} } + { "doc": { "foo": "bar" } } + { "update": { "_index": "test_1", "_id": "2"} } + { "doc": { "foo": "bar" } } + + - match: { items.0.update.result: noop } + - match: { items.1.update.result: noop } + + - do: + indices.stats: { index: test_1 } + + - match: { indices.test_1.total.indexing.index_total: 2 } + - match: { indices.test_1.primaries.indexing.index_total: 2 } + - match: { indices.test_1.total.indexing.noop_update_total: 2 } # total noop == primaries noop + - match: { indices.test_1.primaries.indexing.noop_update_total: 2 } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java 
b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index fe7af4bc26e6e..5c1f0e4aa7306 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -339,6 +339,7 @@ static boolean executeBulkItemRequest( if (updateResult.getResponseResult() == DocWriteResponse.Result.NOOP) { context.markOperationAsNoOp(updateResult.action()); context.markAsCompleted(context.getExecutionResult()); + context.getPrimary().noopUpdate(); return true; } context.setRequestToExecute(updateResult.action()); From b85d4b1dbb89512d68370aee819c50c6c9200335 Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Thu, 28 Mar 2024 14:02:47 +0100 Subject: [PATCH 04/69] Fix typo in functions/README.md (#106870) --- docs/reference/esql/functions/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/esql/functions/README.md b/docs/reference/esql/functions/README.md index 0f0f3b6e3cbb8..7be4c70fbe6b0 100644 --- a/docs/reference/esql/functions/README.md +++ b/docs/reference/esql/functions/README.md @@ -12,10 +12,10 @@ directory that can `include::` any parts of the files above. To regenerate the files for a function run its tests using gradle: ``` -./gradlew :x-pack:plugin:esql:tests -Dtests.class='*SinTests' +./gradlew :x-pack:plugin:esql:test -Dtests.class='*SinTests' ``` To regenerate the files for all functions run all of ESQL's tests using gradle: ``` -./gradlew :x-pack:plugin:esql:tests +./gradlew :x-pack:plugin:esql:test ``` From c7a35a4ee37d3f5ba90559153f68b4db7eac048c Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 28 Mar 2024 14:31:37 +0100 Subject: [PATCH 05/69] Verify feature usage in REST tests (#106800) Regular feature names are extracted together with historical features during feature metadata extraction. 
Based on this, feature checks in tests are validated to use only known features to prevent tests from being silently disabled due to a invalid or misspelled feature name. --------- Co-authored-by: Lorenzo Dematte --- .../elasticsearch/features/FeatureData.java | 3 +- .../test/rest/ESRestTestCase.java | 50 +------ .../test/rest/ESRestTestFeatureService.java | 127 ++++++++++++++---- .../HistoricalFeaturesMetadataExtractor.java | 28 +++- ...toricalFeaturesMetadataExtractorTests.java | 29 +++- 5 files changed, 148 insertions(+), 89 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/features/FeatureData.java b/server/src/main/java/org/elasticsearch/features/FeatureData.java index 2dd35e648afaf..e5e8c05c456ef 100644 --- a/server/src/main/java/org/elasticsearch/features/FeatureData.java +++ b/server/src/main/java/org/elasticsearch/features/FeatureData.java @@ -38,7 +38,8 @@ private FeatureData(NavigableMap> historicalFeatures, Map specs) { Map allFeatures = new HashMap<>(); - NavigableMap> historicalFeatures = new TreeMap<>(); + // Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version + NavigableMap> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of())); Map nodeFeatures = new HashMap<>(); for (FeatureSpecification spec : specs) { var specFeatures = spec.getFeatures(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 6905ee391a6eb..29b74478bec6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -60,7 +60,6 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.FeatureSpecification; @@ -88,11 +87,9 @@ import org.junit.Before; import java.io.BufferedReader; -import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.io.UncheckedIOException; import java.nio.CharBuffer; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -365,13 +362,7 @@ protected final TestFeatureService createTestFeatureService( Set semanticNodeVersions ) { // Historical features information is unavailable when using legacy test plugins - boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null; - - final List featureSpecifications = new ArrayList<>(createAdditionalFeatureSpecifications()); - featureSpecifications.add(new RestTestLegacyFeatures()); - if (hasHistoricalFeaturesInformation) { - featureSpecifications.add(new ESRestTestCaseHistoricalFeatures()); - } else { + if (ESRestTestFeatureService.hasFeatureMetadata() == false) { logger.warn( "This test is running on the legacy test framework; historical features from production code will not be available. " + "You need to port the test to the new test plugins in order to use historical features from production code. 
" @@ -379,9 +370,8 @@ protected final TestFeatureService createTestFeatureService( RestTestLegacyFeatures.class.getCanonicalName() ); } - return new ESRestTestFeatureService( - featureSpecifications, + createAdditionalFeatureSpecifications(), semanticNodeVersions, ClusterFeatures.calculateAllNodeFeatures(clusterStateFeatures.values()) ); @@ -2413,42 +2403,6 @@ private static boolean isMlEnabled() { } } - private static class ESRestTestCaseHistoricalFeatures implements FeatureSpecification { - private static Map historicalFeatures; - - @Override - @SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file") - public Map getHistoricalFeatures() { - if (historicalFeatures == null) { - Map historicalFeaturesMap = new HashMap<>(); - String metadataPath = System.getProperty("tests.features.metadata.path"); - if (metadataPath == null) { - throw new UnsupportedOperationException( - "Historical features information is unavailable when using legacy test plugins." - ); - } - - String[] metadataFiles = metadataPath.split(File.pathSeparator); - for (String metadataFile : metadataFiles) { - try ( - InputStream in = Files.newInputStream(PathUtils.get(metadataFile)); - XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in) - ) { - for (Map.Entry entry : parser.mapStrings().entrySet()) { - historicalFeaturesMap.put(new NodeFeature(entry.getKey()), Version.fromString(entry.getValue())); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - historicalFeatures = Collections.unmodifiableMap(historicalFeaturesMap); - } - - return historicalFeatures; - } - } - public static void setIgnoredErrorResponseCodes(Request request, RestStatus... 
restStatuses) { request.addParameter( IGNORE_RESPONSE_CODES_PARAM, diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index c8647f4e9c43b..dde6784f47b06 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -10,57 +10,126 @@ import org.elasticsearch.Version; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.Strings; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.FeatureData; import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; -import java.util.NavigableMap; +import java.util.Map; import java.util.Set; -import java.util.function.Predicate; +import java.util.function.BiConsumer; + +import static java.util.Collections.emptySet; class ESRestTestFeatureService implements TestFeatureService { - private final Predicate historicalFeaturesPredicate; - private final Set clusterStateFeatures; private final Set allSupportedFeatures; + private final Set knownHistoricalFeatureNames; - ESRestTestFeatureService( - List specs, - Collection nodeVersions, - Set clusterStateFeatures - ) { - var minNodeVersion = 
nodeVersions.stream().min(Comparator.naturalOrder()); - var featureData = FeatureData.createFromSpecifications(specs); - var historicalFeatures = featureData.getHistoricalFeatures(); - Set allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue(); - - this.allSupportedFeatures = Sets.union(clusterStateFeatures, minNodeVersion.>map(v -> { - var historicalFeaturesForVersion = historicalFeatures.floorEntry(v); - return historicalFeaturesForVersion == null ? Set.of() : historicalFeaturesForVersion.getValue(); - }).orElse(allHistoricalFeatures)); - - this.historicalFeaturesPredicate = minNodeVersion.>map( - v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId) - ).orElse(featureId -> true); // We can safely assume that new non-semantic versions (serverless) support all historical features - this.clusterStateFeatures = clusterStateFeatures; + ESRestTestFeatureService(List featureSpecs, Collection nodeVersions, Set clusterStateFeatures) { + List specs = new ArrayList<>(featureSpecs); + specs.add(new RestTestLegacyFeatures()); + if (MetadataHolder.HISTORICAL_FEATURES != null) { + specs.add(MetadataHolder.HISTORICAL_FEATURES); + } + var historicalFeatures = FeatureData.createFromSpecifications(specs).getHistoricalFeatures(); + this.knownHistoricalFeatureNames = historicalFeatures.lastEntry().getValue(); + var minVersion = nodeVersions.stream().min(Comparator.naturalOrder()); + var supportedHistoricalFeatures = minVersion.map(v -> historicalFeatures.floorEntry(v).getValue()) + .orElse(knownHistoricalFeatureNames); + this.allSupportedFeatures = Sets.union(clusterStateFeatures, supportedHistoricalFeatures); } - private static boolean hasHistoricalFeature(NavigableMap> historicalFeatures, Version version, String featureId) { - var features = historicalFeatures.floorEntry(version); - return features != null && features.getValue().contains(featureId); + public static boolean hasFeatureMetadata() { + 
return MetadataHolder.HISTORICAL_FEATURES != null; } @Override public boolean clusterHasFeature(String featureId) { - if (clusterStateFeatures.contains(featureId)) { - return true; + if (hasFeatureMetadata() + && MetadataHolder.FEATURE_NAMES.contains(featureId) == false + && knownHistoricalFeatureNames.contains(featureId) == false) { + throw new IllegalArgumentException( + Strings.format( + "Unknown feature %s: check the feature has been added to the correct FeatureSpecification in the relevant module or, " + + "if this is a legacy feature used only in tests, to a test-only FeatureSpecification such as %s.", + featureId, + RestTestLegacyFeatures.class.getCanonicalName() + ) + ); } - return historicalFeaturesPredicate.test(featureId); + return allSupportedFeatures.contains(featureId); } @Override public Set getAllSupportedFeatures() { return allSupportedFeatures; } + + private static class MetadataHolder { + private static final FeatureSpecification HISTORICAL_FEATURES; + private static final Set FEATURE_NAMES; + + static { + String metadataPath = System.getProperty("tests.features.metadata.path"); + if (metadataPath == null) { + FEATURE_NAMES = emptySet(); + HISTORICAL_FEATURES = null; + } else { + Set featureNames = new HashSet<>(); + Map historicalFeatures = new HashMap<>(); + loadFeatureMetadata(metadataPath, (key, value) -> { + if (key.equals("historical_features") && value instanceof Map map) { + for (var entry : map.entrySet()) { + historicalFeatures.put(new NodeFeature((String) entry.getKey()), Version.fromString((String) entry.getValue())); + } + } + if (key.equals("feature_names") && value instanceof Collection collection) { + for (var entry : collection) { + featureNames.add((String) entry); + } + } + }); + FEATURE_NAMES = Collections.unmodifiableSet(featureNames); + Map unmodifiableHistoricalFeatures = Collections.unmodifiableMap(historicalFeatures); + HISTORICAL_FEATURES = new FeatureSpecification() { + @Override + public Map getHistoricalFeatures() { + 
return unmodifiableHistoricalFeatures; + } + }; + } + } + + @SuppressForbidden(reason = "File#pathSeparator has not equivalent in java.nio.file") + private static void loadFeatureMetadata(String metadataPath, BiConsumer consumer) { + String[] metadataFiles = metadataPath.split(File.pathSeparator); + for (String metadataFile : metadataFiles) { + try ( + InputStream in = Files.newInputStream(PathUtils.get(metadataFile)); + XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, in) + ) { + parser.map().forEach(consumer); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + } + } } diff --git a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java index 33162bcfa1eca..b3941371f9291 100644 --- a/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java +++ b/test/metadata-extractor/src/main/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractor.java @@ -9,6 +9,7 @@ package org.elasticsearch.extractor.features; import org.elasticsearch.Version; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -24,8 +25,10 @@ import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.ServiceLoader; +import java.util.Set; public class HistoricalFeaturesMetadataExtractor { private final ClassLoader classLoader; @@ -62,23 +65,36 @@ public void generateMetadataFile(Path outputFile) { XContentGenerator generator = JsonXContent.jsonXContent.createGenerator(os) ) { generator.writeStartObject(); - for (Map.Entry 
entry : extractHistoricalFeatureMetadata().entrySet()) { - generator.writeStringField(entry.getKey().id(), entry.getValue().toString()); - } + extractHistoricalFeatureMetadata((historical, names) -> { + generator.writeFieldName("historical_features"); + generator.writeStartObject(); + for (Map.Entry entry : historical.entrySet()) { + generator.writeStringField(entry.getKey().id(), entry.getValue().toString()); + } + generator.writeEndObject(); + generator.writeFieldName("feature_names"); + generator.writeStartArray(); + for (var entry : names) { + generator.writeString(entry); + } + generator.writeEndArray(); + }); generator.writeEndObject(); } catch (IOException e) { throw new UncheckedIOException(e); } } - public Map extractHistoricalFeatureMetadata() { + void extractHistoricalFeatureMetadata(CheckedBiConsumer, Set, IOException> metadataConsumer) + throws IOException { Map historicalFeatures = new HashMap<>(); + Set featureNames = new HashSet<>(); ServiceLoader featureSpecLoader = ServiceLoader.load(FeatureSpecification.class, classLoader); for (FeatureSpecification featureSpecification : featureSpecLoader) { historicalFeatures.putAll(featureSpecification.getHistoricalFeatures()); + featureSpecification.getFeatures().stream().map(NodeFeature::id).forEach(featureNames::add); } - - return historicalFeatures; + metadataConsumer.accept(historicalFeatures, featureNames); } private static void printUsageAndExit() { diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java index ba80decd046e6..a4cfcae198553 100644 --- a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java @@ -19,11 
+19,18 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase { @@ -33,16 +40,28 @@ public class HistoricalFeaturesMetadataExtractorTests extends ESTestCase { public void testExtractHistoricalMetadata() throws IOException { HistoricalFeaturesMetadataExtractor extractor = new HistoricalFeaturesMetadataExtractor(this.getClass().getClassLoader()); - Map nodeFeatureVersionMap = extractor.extractHistoricalFeatureMetadata(); + Map nodeFeatureVersionMap = new HashMap<>(); + Set featureNamesSet = new HashSet<>(); + extractor.extractHistoricalFeatureMetadata((historical, names) -> { + nodeFeatureVersionMap.putAll(historical); + featureNamesSet.addAll(names); + }); assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + assertThat(featureNamesSet, not(empty())); Path outputFile = temporaryFolder.newFile().toPath(); extractor.generateMetadataFile(outputFile); try (XContentParser parser = JsonXContent.jsonXContent.createParser(EMPTY, Files.newInputStream(outputFile))) { - Map parsedMap = parser.mapStrings(); - for (Map.Entry entry : nodeFeatureVersionMap.entrySet()) { - assertThat(parsedMap, hasEntry(entry.getKey().id(), entry.getValue().toString())); - } + Map parsedMap = parser.map(); + assertThat(parsedMap, hasKey("historical_features")); + assertThat(parsedMap, hasKey("feature_names")); + @SuppressWarnings("unchecked") + Map historicalFeaturesMap = (Map) (parsedMap.get("historical_features")); 
+ nodeFeatureVersionMap.forEach((key, value) -> assertThat(historicalFeaturesMap, hasEntry(key.id(), value.toString()))); + + @SuppressWarnings("unchecked") + Collection featureNamesList = (Collection) (parsedMap.get("feature_names")); + assertThat(featureNamesList, containsInAnyOrder(featureNamesSet.toArray())); } } } From 917f54a08f01fc6c695d022bf4634380f0ec6373 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 28 Mar 2024 15:10:48 +0100 Subject: [PATCH 06/69] Extend support of allowedFields to getMatchingFieldNames and getAllFields (#106862) The SearchExecutionContext supports the notion of allowed fields, provided via a specific setter method. Fields are though only filtered for the getFieldType method. There needs to be consistency between getMatchingFieldNames and getFieldType. In fact there are places in the code where getMatchingFieldNames is called to resolve field name patterns, and later getFieldType is called on each of the resolved fields. If the former resolves to one field that we can't retrieve a field type for, that is unexpected and to be considered a bug. In addition, this commit adds consistency for getAllFields: this is only called by field caps, hence a different codepath that does not seem to set allowed fields for now, but it's important for the context to provide consistency around fields access, especially for methods that are as broad as getAllFields, despite their currently very specific usage. This surfaced as we are trying to move fetching of the `_ignored` field to use value fetchers, which use a search execution context and resolve the field type, whereas until now they are retrieved directly via StoredFieldsPhase and completely bypass such check. This commit also adds a test that was missing around verifying that SearchExecutionContext applies the allowedFields predicate when provided. 
--- docs/changelog/106862.yaml | 5 ++ .../index/query/QueryRewriteContext.java | 47 ++++++----- .../query/SearchExecutionContextTests.java | 79 +++++++++++++++++++ 3 files changed, 113 insertions(+), 18 deletions(-) create mode 100644 docs/changelog/106862.yaml diff --git a/docs/changelog/106862.yaml b/docs/changelog/106862.yaml new file mode 100644 index 0000000000000..3ca2660fc3f73 --- /dev/null +++ b/docs/changelog/106862.yaml @@ -0,0 +1,5 @@ +pr: 106862 +summary: Extend support of `allowedFields` to `getMatchingFieldNames` and `getAllFields` +area: "Mapping" +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 6ab5d6d77d86d..fd8d3794cf2d8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -37,6 +37,7 @@ import java.util.function.BooleanSupplier; import java.util.function.LongSupplier; import java.util.function.Predicate; +import java.util.stream.Collectors; /** * Context object used to rewrite {@link QueryBuilder} instances into simplified version. 
@@ -318,35 +319,45 @@ public boolean indexMatches(String pattern) { * @param pattern the field name pattern */ public Set getMatchingFieldNames(String pattern) { + Set matches; if (runtimeMappings.isEmpty()) { - return mappingLookup.getMatchingFieldNames(pattern); - } - Set matches = new HashSet<>(mappingLookup.getMatchingFieldNames(pattern)); - if ("*".equals(pattern)) { - matches.addAll(runtimeMappings.keySet()); - } else if (Regex.isSimpleMatchPattern(pattern) == false) { - // no wildcard - if (runtimeMappings.containsKey(pattern)) { - matches.add(pattern); - } + matches = mappingLookup.getMatchingFieldNames(pattern); } else { - for (String name : runtimeMappings.keySet()) { - if (Regex.simpleMatch(pattern, name)) { - matches.add(name); + matches = new HashSet<>(mappingLookup.getMatchingFieldNames(pattern)); + if ("*".equals(pattern)) { + matches.addAll(runtimeMappings.keySet()); + } else if (Regex.isSimpleMatchPattern(pattern) == false) { + // no wildcard + if (runtimeMappings.containsKey(pattern)) { + matches.add(pattern); + } + } else { + for (String name : runtimeMappings.keySet()) { + if (Regex.simpleMatch(pattern, name)) { + matches.add(name); + } } } } - return matches; + // If the field is not allowed, behave as if it is not mapped + return allowedFields == null ? matches : matches.stream().filter(allowedFields).collect(Collectors.toSet()); } /** * @return An {@link Iterable} with key the field name and value the MappedFieldType */ public Iterable> getAllFields() { - var allFromMapping = mappingLookup.getFullNameToFieldType(); - // runtime mappings and non-runtime fields don't overlap, so we can simply concatenate the iterables here - return runtimeMappings.isEmpty() + Map allFromMapping = mappingLookup.getFullNameToFieldType(); + Set> allEntrySet = allowedFields == null ? 
allFromMapping.entrySet() - : () -> Iterators.concat(allFromMapping.entrySet().iterator(), runtimeMappings.entrySet().iterator()); + : allFromMapping.entrySet().stream().filter(entry -> allowedFields.test(entry.getKey())).collect(Collectors.toSet()); + if (runtimeMappings.isEmpty()) { + return allEntrySet; + } + Set> runtimeEntrySet = allowedFields == null + ? runtimeMappings.entrySet() + : runtimeMappings.entrySet().stream().filter(entry -> allowedFields.test(entry.getKey())).collect(Collectors.toSet()); + // runtime mappings and non-runtime fields don't overlap, so we can simply concatenate the iterables here + return () -> Iterators.concat(allEntrySet.iterator(), runtimeEntrySet.iterator()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 6d671a258c26a..2f31bac135716 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -96,6 +96,7 @@ import java.util.stream.Collectors; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; @@ -401,6 +402,84 @@ public void testSyntheticSourceSearchLookup() throws IOException { assertEquals("meow", source.source().get("cat")); } + public void testAllowedFields() { + Map runtimeMappings = Map.ofEntries( + Map.entry("runtimecat", Map.of("type", "keyword")), + Map.entry("runtimedog", Map.of("type", "long")) + ); + SearchExecutionContext context = createSearchExecutionContext( + "uuid", + null, + SearchExecutionContextTests.createMappingLookup( + List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), + List.of(new 
TestRuntimeField("runtime", "long")) + ), + runtimeMappings + ); + + assertNotNull(context.getFieldType("pig")); + assertNotNull(context.getFieldType("cat")); + assertNotNull(context.getFieldType("runtimecat")); + assertNotNull(context.getFieldType("runtimedog")); + assertNotNull(context.getFieldType("runtime")); + assertEquals(3, context.getMatchingFieldNames("runtime*").size()); + assertEquals(2, context.getMatchingFieldNames("*cat").size()); + assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtimedog", "runtime")); + + context.setAllowedFields(s -> true); + assertNotNull(context.getFieldType("pig")); + assertTrue(context.isFieldMapped("pig")); + assertNotNull(context.getFieldType("cat")); + assertTrue(context.isFieldMapped("cat")); + assertNotNull(context.getFieldType("runtimecat")); + assertTrue(context.isFieldMapped("runtimecat")); + assertNotNull(context.getFieldType("runtimedog")); + assertTrue(context.isFieldMapped("runtimedog")); + assertNotNull(context.getFieldType("runtime")); + assertTrue(context.isFieldMapped("runtime")); + assertEquals(3, context.getMatchingFieldNames("runtime*").size()); + assertEquals(2, context.getMatchingFieldNames("*cat").size()); + assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtimedog", "runtime")); + + context.setAllowedFields(s -> s.equals("cat")); + assertNull(context.getFieldType("pig")); + assertFalse(context.isFieldMapped("pig")); + assertNotNull(context.getFieldType("cat")); + assertTrue(context.isFieldMapped("cat")); + assertNull(context.getFieldType("runtimecat")); + assertFalse(context.isFieldMapped("runtimecat")); + assertNull(context.getFieldType("runtimedog")); + assertFalse(context.isFieldMapped("runtimedog")); + assertNull(context.getFieldType("runtime")); + assertFalse(context.isFieldMapped("runtime")); + assertEquals(0, context.getMatchingFieldNames("runtime*").size()); + assertEquals(1, 
context.getMatchingFieldNames("*cat").size()); + assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("cat")); + + context.setAllowedFields(s -> s.contains("dog") == false); + assertNotNull(context.getFieldType("pig")); + assertTrue(context.isFieldMapped("pig")); + assertNotNull(context.getFieldType("cat")); + assertTrue(context.isFieldMapped("cat")); + assertNotNull(context.getFieldType("runtimecat")); + assertTrue(context.isFieldMapped("runtimecat")); + assertNull(context.getFieldType("runtimedog")); + assertFalse(context.isFieldMapped("runtimedog")); + assertNotNull(context.getFieldType("runtime")); + assertTrue(context.isFieldMapped("runtime")); + assertEquals(2, context.getMatchingFieldNames("runtime*").size()); + assertEquals(2, context.getMatchingFieldNames("*cat").size()); + assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtime")); + } + + private static List getFieldNames(Iterable> fields) { + List fieldNames = new ArrayList<>(); + for (Map.Entry field : fields) { + fieldNames.add(field.getKey()); + } + return fieldNames; + } + public static SearchExecutionContext createSearchExecutionContext(String indexUuid, String clusterAlias) { return createSearchExecutionContext(indexUuid, clusterAlias, MappingLookup.EMPTY, Map.of()); } From f7532bd88b9964fc3e6765176f47cfd425337877 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Thu, 28 Mar 2024 10:51:57 -0400 Subject: [PATCH 07/69] [Transform] Rearrange failure reason (#106826) Stop and Start error messages include the reason for the error followed by the suggestion to use force=true. This may cause the suggestion to be hidden by the reason, so we will move the reason after the suggestion. 
Close #106819 --- .../core/transform/TransformMessages.java | 11 +-- .../TransformTaskFailedStateIT.java | 50 +++++------ .../action/TransportStopTransformAction.java | 17 ++-- .../transform/transforms/TransformTask.java | 4 +- .../TransportStopTransformActionTests.java | 85 ++++++++++++------- .../transforms/TransformTaskTests.java | 4 +- 6 files changed, 98 insertions(+), 73 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java index 6f1fdb8a20cae..8c3b133d580a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformMessages.java @@ -35,11 +35,12 @@ public class TransformMessages { public static final String REST_WARN_NO_TRANSFORM_NODES = "Transform requires the transform node role for at least 1 node, found no transform nodes"; - public static final String CANNOT_STOP_FAILED_TRANSFORM = "Unable to stop transform [{0}] as it is in a failed state with reason [{1}]." - + " Use force stop to stop the transform."; - public static final String CANNOT_START_FAILED_TRANSFORM = - "Unable to start transform [{0}] as it is in a failed state with failure: [{1}]. " - + "Use force stop and then restart the transform once error is resolved."; + public static final String CANNOT_STOP_SINGLE_FAILED_TRANSFORM = "Unable to stop transform [{0}] as it is in a failed state. " + + "Use force stop to stop the transform. More details: [{1}]"; + public static final String CANNOT_STOP_MULTIPLE_FAILED_TRANSFORMS = "Unable to stop transforms. The following transforms are in a " + + "failed state [{0}]. Use force stop to stop the transforms. More details: [{1}]"; + public static final String CANNOT_START_FAILED_TRANSFORM = "Unable to start transform [{0}] as it is in a failed state. 
" + + "Use force stop and then restart the transform once error is resolved. More details: [{1}]"; public static final String FAILED_TO_CREATE_DESTINATION_INDEX = "Could not create destination index [{0}] for transform [{1}]"; public static final String FAILED_TO_SET_UP_DESTINATION_ALIASES = diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java index 1abf611e833c4..bccd97f22b4a1 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformTaskFailedStateIT.java @@ -28,8 +28,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.matchesRegex; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class TransformTaskFailedStateIT extends TransformRestTestCase { @@ -69,13 +69,13 @@ public void testForceStopFailedTransform() throws Exception { startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); - final String failureReason = "Failed to index documents into destination index due to permanent error: " - + "\\[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced \\[7\\] " + var failureReason = "Failed to index documents into destination index due to permanent error: " + + "[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced [7] " + "failures and at least 1 irrecoverable " - + 
"\\[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " - + "incompatible with the transform configuration.;.*"; + + "[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " + + "incompatible with the transform configuration.;"; // Verify we have failed for the expected reason - assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); @@ -84,10 +84,10 @@ public void testForceStopFailedTransform() throws Exception { assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); assertThat( (String) XContentMapValues.extractValue("error.reason", entityAsMap(ex.getResponse())), - matchesRegex( - "Unable to stop transform \\[test-force-stop-failed-transform\\] as it is in a failed state with reason \\[" + startsWith( + "Unable to stop transform [test-force-stop-failed-transform] as it is in a failed state. " + + "Use force stop to stop the transform. More details: [" + failureReason - + "\\]. Use force stop to stop the transform." 
) ); @@ -113,13 +113,13 @@ public void testForceResetFailedTransform() throws Exception { startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); - final String failureReason = "Failed to index documents into destination index due to permanent error: " - + "\\[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced \\[7\\] " + var failureReason = "Failed to index documents into destination index due to permanent error: " + + "[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced [7] " + "failures and at least 1 irrecoverable " - + "\\[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " - + "incompatible with the transform configuration.;.*"; + + "[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " + + "incompatible with the transform configuration.;"; // Verify we have failed for the expected reason - assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); @@ -149,28 +149,24 @@ public void testStartFailedTransform() throws Exception { startTransform(transformId); awaitState(transformId, TransformStats.State.FAILED); Map fullState = getTransformStateAndStats(transformId); - final String failureReason = "Failed to index documents into destination index due to permanent error: " - + "\\[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced \\[7\\] " + var failureReason = "Failed to index documents into destination index due to permanent error: " + + "[org.elasticsearch.xpack.transform.transforms.BulkIndexingException: Bulk index experienced [7] " + "failures and at least 1 irrecoverable " - + 
"\\[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " - + "incompatible with the transform configuration.;.*"; + + "[org.elasticsearch.xpack.transform.transforms.TransformException: Destination index mappings are " + + "incompatible with the transform configuration.;"; // Verify we have failed for the expected reason - assertThat((String) XContentMapValues.extractValue("reason", fullState), matchesRegex(failureReason)); + assertThat((String) XContentMapValues.extractValue("reason", fullState), startsWith(failureReason)); assertThat(getTransformTasks(), hasSize(1)); - final String expectedFailure = "Unable to start transform \\[test-force-start-failed-transform\\] " - + "as it is in a failed state with failure: \\[" - + failureReason - + "\\]. Use force stop and then restart the transform once error is resolved."; + var expectedFailure = "Unable to start transform [test-force-start-failed-transform] " + + "as it is in a failed state. Use force stop and then restart the transform once error is resolved. 
More details: [" + + failureReason; // Verify that we cannot start the transform when the task is in a failed state assertBusy(() -> { ResponseException ex = expectThrows(ResponseException.class, () -> startTransform(transformId)); assertThat(ex.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.CONFLICT.getStatus())); - assertThat( - (String) XContentMapValues.extractValue("error.reason", entityAsMap(ex.getResponse())), - matchesRegex(expectedFailure) - ); + assertThat((String) XContentMapValues.extractValue("error.reason", entityAsMap(ex.getResponse())), startsWith(expectedFailure)); }, 60, TimeUnit.SECONDS); stopTransform(transformId, true); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java index a309aaa2e4e0e..b8ea1fee6e886 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportStopTransformAction.java @@ -61,7 +61,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_STOP_FAILED_TRANSFORM; +import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_STOP_MULTIPLE_FAILED_TRANSFORMS; +import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_STOP_SINGLE_FAILED_TRANSFORM; public class TransportStopTransformAction extends TransportTasksAction { @@ -112,12 +113,12 @@ static void validateTaskState(ClusterState state, List transformIds, boo } if (failedTasks.isEmpty() == false) { String msg = failedTasks.size() == 1 - ? TransformMessages.getMessage(CANNOT_STOP_FAILED_TRANSFORM, failedTasks.get(0), failedReasons.get(0)) - : "Unable to stop transforms. 
The following transforms are in a failed state " - + failedTasks - + " with reasons " - + failedReasons - + ". Use force stop to stop the transforms."; + ? TransformMessages.getMessage(CANNOT_STOP_SINGLE_FAILED_TRANSFORM, failedTasks.get(0), failedReasons.get(0)) + : TransformMessages.getMessage( + CANNOT_STOP_MULTIPLE_FAILED_TRANSFORMS, + String.join(", ", failedTasks), + String.join(", ", failedReasons) + ); throw new ElasticsearchStatusException(msg, RestStatus.CONFLICT); } } @@ -409,7 +410,7 @@ private void waitForTransformStopped( exceptions.put( persistentTaskId, new ElasticsearchStatusException( - TransformMessages.getMessage(CANNOT_STOP_FAILED_TRANSFORM, persistentTaskId, taskState.getReason()), + TransformMessages.getMessage(CANNOT_STOP_SINGLE_FAILED_TRANSFORM, persistentTaskId, taskState.getReason()), RestStatus.CONFLICT ) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index dbfc30a38f4c3..8eecd20c95ccd 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -53,7 +53,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_START_FAILED_TRANSFORM; -import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_STOP_FAILED_TRANSFORM; +import static org.elasticsearch.xpack.core.transform.TransformMessages.CANNOT_STOP_SINGLE_FAILED_TRANSFORM; public class TransformTask extends AllocatedPersistentTask implements TransformScheduler.Listener, TransformContext.Listener { @@ -359,7 +359,7 @@ public void stop(boolean force, boolean shouldStopAtCheckpoint) { synchronized (context) { if (context.getTaskState() == 
TransformTaskState.FAILED && force == false) { throw new ElasticsearchStatusException( - TransformMessages.getMessage(CANNOT_STOP_FAILED_TRANSFORM, getTransformId(), context.getStateReason()), + TransformMessages.getMessage(CANNOT_STOP_SINGLE_FAILED_TRANSFORM, getTransformId(), context.getStateReason()), RestStatus.CONFLICT ); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java index 21fa6679cb2d6..59959edc7232a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportStopTransformActionTests.java @@ -17,14 +17,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; -import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import static org.elasticsearch.rest.RestStatus.CONFLICT; @@ -39,11 +36,11 @@ private Metadata.Builder buildMetadata(PersistentTasksCustomMetadata ptasks) { public void testTaskStateValidationWithNoTasks() { Metadata.Builder metadata = Metadata.builder(); ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")).metadata(metadata); - TransportStopTransformAction.validateTaskState(csBuilder.build(), Collections.singletonList("non-failed-task"), false); + 
TransportStopTransformAction.validateTaskState(csBuilder.build(), List.of("non-failed-task"), false); PersistentTasksCustomMetadata.Builder pTasksBuilder = PersistentTasksCustomMetadata.builder(); csBuilder = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())); - TransportStopTransformAction.validateTaskState(csBuilder.build(), Collections.singletonList("non-failed-task"), false); + TransportStopTransformAction.validateTaskState(csBuilder.build(), List.of("non-failed-task"), false); } public void testTaskStateValidationWithTransformTasks() { @@ -57,7 +54,7 @@ public void testTaskStateValidationWithTransformTasks() { ); ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())); - TransportStopTransformAction.validateTaskState(csBuilder.build(), Collections.singletonList("non-failed-task"), false); + TransportStopTransformAction.validateTaskState(csBuilder.build(), List.of("non-failed-task"), false); // test again with a non failed task but this time it has internal state pTasksBuilder.updateTaskState( @@ -66,8 +63,9 @@ public void testTaskStateValidationWithTransformTasks() { ); csBuilder = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())); - TransportStopTransformAction.validateTaskState(csBuilder.build(), Collections.singletonList("non-failed-task"), false); + TransportStopTransformAction.validateTaskState(csBuilder.build(), List.of("non-failed-task"), false); + // test again with one failed task pTasksBuilder.addTask( "failed-task", TransformTaskParams.NAME, @@ -80,20 +78,60 @@ public void testTaskStateValidationWithTransformTasks() { ); final ClusterState cs = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())).build(); - TransportStopTransformAction.validateTaskState(cs, Arrays.asList("non-failed-task", "failed-task"), true); + 
TransportStopTransformAction.validateTaskState(cs, List.of("non-failed-task", "failed-task"), true); - TransportStopTransformAction.validateTaskState(cs, Collections.singletonList("non-failed-task"), false); + TransportStopTransformAction.validateTaskState(cs, List.of("non-failed-task"), false); ClusterState.Builder csBuilderFinal = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())); ElasticsearchStatusException ex = expectThrows( ElasticsearchStatusException.class, - () -> TransportStopTransformAction.validateTaskState(csBuilderFinal.build(), Collections.singletonList("failed-task"), false) + () -> TransportStopTransformAction.validateTaskState(csBuilderFinal.build(), List.of("failed-task"), false) ); assertThat(ex.status(), equalTo(CONFLICT)); assertThat( ex.getMessage(), - equalTo(TransformMessages.getMessage(TransformMessages.CANNOT_STOP_FAILED_TRANSFORM, "failed-task", "task has failed")) + equalTo( + "Unable to stop transform [failed-task] as it is in a failed state. Use force stop to stop the transform. 
" + + "More details: [task has failed]" + ) + ); + + // test again with two failed tasks + pTasksBuilder.addTask( + "failed-task-2", + TransformTaskParams.NAME, + new TransformTaskParams("transform-task-2", TransformConfigVersion.CURRENT, null, false), + new PersistentTasksCustomMetadata.Assignment("current-data-node-with-2-tasks", "") + ) + .updateTaskState( + "failed-task-2", + new TransformState( + TransformTaskState.FAILED, + IndexerState.STOPPED, + null, + 0L, + "task has also failed", + null, + null, + false, + null + ) + ); + + var csBuilderMultiTask = ClusterState.builder(new ClusterName("_name")).metadata(buildMetadata(pTasksBuilder.build())); + ex = expectThrows( + ElasticsearchStatusException.class, + () -> TransportStopTransformAction.validateTaskState(csBuilderMultiTask.build(), List.of("failed-task", "failed-task-2"), false) + ); + + assertThat(ex.status(), equalTo(CONFLICT)); + assertThat( + ex.getMessage(), + equalTo( + "Unable to stop transforms. The following transforms are in a failed state [failed-task, failed-task-2]. Use force " + + "stop to stop the transforms. 
More details: [task has failed, task has also failed]" + ) ); } @@ -106,38 +144,27 @@ public void testFirstNotOKStatus() { ); taskOperationFailures.add(new TaskOperationFailure("node", 1, new ElasticsearchStatusException("failure", RestStatus.BAD_REQUEST))); - assertThat( - TransportStopTransformAction.firstNotOKStatus(Collections.emptyList(), Collections.emptyList()), - equalTo(RestStatus.INTERNAL_SERVER_ERROR) - ); + assertThat(TransportStopTransformAction.firstNotOKStatus(List.of(), List.of()), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); - assertThat( - TransportStopTransformAction.firstNotOKStatus(taskOperationFailures, Collections.emptyList()), - equalTo(RestStatus.BAD_REQUEST) - ); + assertThat(TransportStopTransformAction.firstNotOKStatus(taskOperationFailures, List.of()), equalTo(RestStatus.BAD_REQUEST)); assertThat(TransportStopTransformAction.firstNotOKStatus(taskOperationFailures, nodeFailures), equalTo(RestStatus.BAD_REQUEST)); assertThat( TransportStopTransformAction.firstNotOKStatus( taskOperationFailures, - Collections.singletonList(new ElasticsearchException(new ElasticsearchStatusException("not failure", RestStatus.OK))) + List.of(new ElasticsearchException(new ElasticsearchStatusException("not failure", RestStatus.OK))) ), equalTo(RestStatus.BAD_REQUEST) ); assertThat( TransportStopTransformAction.firstNotOKStatus( - Collections.singletonList( - new TaskOperationFailure("node", 1, new ElasticsearchStatusException("not failure", RestStatus.OK)) - ), + List.of(new TaskOperationFailure("node", 1, new ElasticsearchStatusException("not failure", RestStatus.OK))), nodeFailures ), equalTo(RestStatus.INTERNAL_SERVER_ERROR) ); - assertThat( - TransportStopTransformAction.firstNotOKStatus(Collections.emptyList(), nodeFailures), - equalTo(RestStatus.INTERNAL_SERVER_ERROR) - ); + assertThat(TransportStopTransformAction.firstNotOKStatus(List.of(), nodeFailures), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); } public void testBuildException() { @@ -160,12 +187,12 
@@ public void testBuildException() { assertThat(statusException.getMessage(), equalTo(taskOperationFailures.get(0).getCause().getMessage())); assertThat(statusException.getSuppressed().length, equalTo(1)); - statusException = TransportStopTransformAction.buildException(Collections.emptyList(), nodeFailures, status); + statusException = TransportStopTransformAction.buildException(List.of(), nodeFailures, status); assertThat(statusException.status(), equalTo(status)); assertThat(statusException.getMessage(), equalTo(nodeFailures.get(0).getMessage())); assertThat(statusException.getSuppressed().length, equalTo(0)); - statusException = TransportStopTransformAction.buildException(taskOperationFailures, Collections.emptyList(), status); + statusException = TransportStopTransformAction.buildException(taskOperationFailures, List.of(), status); assertThat(statusException.status(), equalTo(status)); assertThat(statusException.getMessage(), equalTo(taskOperationFailures.get(0).getCause().getMessage())); assertThat(statusException.getSuppressed().length, equalTo(0)); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index a34d35e4d3cb5..fbf59348e8152 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -151,7 +151,7 @@ public void testStopOnFailedTaskWithStoppedIndexer() { equalTo( "Unable to stop transform [" + transformConfig.getId() - + "] as it is in a failed state with reason [because]. Use force stop to stop the transform." + + "] as it is in a failed state. Use force stop to stop the transform. 
More details: [because]" ) ); @@ -256,7 +256,7 @@ public void testStopOnFailedTaskWithoutIndexer() { equalTo( "Unable to stop transform [" + transformConfig.getId() - + "] as it is in a failed state with reason [because]. Use force stop to stop the transform." + + "] as it is in a failed state. Use force stop to stop the transform. More details: [because]" ) ); From 3350a9f956889c9e1e66b74ae730506d63567670 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 28 Mar 2024 08:00:27 -0700 Subject: [PATCH 08/69] Return specific type for Block#expand (#106850) Like Block#filter, Block#expand should return the specific type of the original block, rather than a generic block type. For instance, the expanded block of an IntBlock should also be an IntBlock. I encountered a situation where I had to cast the expanded block. --- .../org/elasticsearch/compute/data/BooleanBlock.java | 3 +++ .../org/elasticsearch/compute/data/BooleanVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/BytesRefBlock.java | 3 +++ .../org/elasticsearch/compute/data/BytesRefVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/DoubleBlock.java | 3 +++ .../org/elasticsearch/compute/data/DoubleVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/IntBlock.java | 3 +++ .../org/elasticsearch/compute/data/IntVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/LongBlock.java | 3 +++ .../org/elasticsearch/compute/data/LongVectorBlock.java | 6 ++++++ .../org/elasticsearch/compute/data/AbstractVectorBlock.java | 6 ------ .../org/elasticsearch/compute/data/ConstantNullBlock.java | 2 +- .../main/java/org/elasticsearch/compute/data/DocBlock.java | 6 ++++++ .../java/org/elasticsearch/compute/data/X-Block.java.st | 3 +++ .../org/elasticsearch/compute/data/X-VectorBlock.java.st | 6 ++++++ 15 files changed, 61 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index ecc2d03105998..617cb731da656 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -38,6 +38,9 @@ public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, Bo @Override BooleanBlock filter(int... positions); + @Override + BooleanBlock expand(); + @Override default String getWriteableName() { return "BooleanBlock"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index a42e9b148064d..70fcfeca94869 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -49,6 +49,12 @@ public BooleanBlock filter(int... positions) { return vector.filter(positions).asBlock(); } + @Override + public BooleanBlock expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 8331d948ca329..0b70fa0f378eb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -42,6 +42,9 @@ public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, @Override BytesRefBlock filter(int... 
positions); + @Override + BytesRefBlock expand(); + @Override default String getWriteableName() { return "BytesRefBlock"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 1a077f38385e3..8c8c3b59ff758 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -50,6 +50,12 @@ public BytesRefBlock filter(int... positions) { return vector.filter(positions).asBlock(); } + @Override + public BytesRefBlock expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 3a539ebd00d27..5fe36081478f3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -38,6 +38,9 @@ public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, Doub @Override DoubleBlock filter(int... 
positions); + @Override + DoubleBlock expand(); + @Override default String getWriteableName() { return "DoubleBlock"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index 647849a968df9..eec6675e93ae7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -49,6 +49,12 @@ public DoubleBlock filter(int... positions) { return vector.filter(positions).asBlock(); } + @Override + public DoubleBlock expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 81c4dffa50ded..057fcd066ad76 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -38,6 +38,9 @@ public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorB @Override IntBlock filter(int... 
positions); + @Override + IntBlock expand(); + @Override default String getWriteableName() { return "IntBlock"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 4f9bb236dfa80..39f8426a8da3a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -49,6 +49,12 @@ public IntBlock filter(int... positions) { return vector.filter(positions).asBlock(); } + @Override + public IntBlock expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 1504f6f7d9100..fb75ac3303201 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -38,6 +38,9 @@ public sealed interface LongBlock extends Block permits LongArrayBlock, LongVect @Override LongBlock filter(int... 
positions); + @Override + LongBlock expand(); + @Override default String getWriteableName() { return "LongBlock"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index 0d7d1f691837f..b573e025c0be1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -49,6 +49,12 @@ public LongBlock filter(int... positions) { return vector.filter(positions).asBlock(); } + @Override + public LongBlock expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java index 452bdad1ab192..027eda8eb9be3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBlock.java @@ -55,10 +55,4 @@ public final boolean mayHaveMultivaluedFields() { public final MvOrdering mvOrdering() { return MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; } - - @Override - public final Block expand() { - incRef(); - return this; - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index eb86d01fbdf3c..c2ac99a7c8489 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -97,7 
+97,7 @@ public MvOrdering mvOrdering() { } @Override - public Block expand() { + public ConstantNullBlock expand() { incRef(); return this; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index a58b8c34b17d5..8d3497a66a2d7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -48,6 +48,12 @@ public Block filter(int... positions) { return new DocBlock(asVector().filter(positions)); } + @Override + public DocBlock expand() { + incRef(); + return this; + } + @Override public int hashCode() { return vector.hashCode(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 04f91f166b9d2..f86f86500529c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -53,6 +53,9 @@ $endif$ @Override $Type$Block filter(int... 
positions); + @Override + $Type$Block expand(); + @Override default String getWriteableName() { return "$Type$Block"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index e3d696ddf9120..8df5cea4c883b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -57,6 +57,12 @@ $endif$ return vector.filter(positions).asBlock(); } + @Override + public $Type$Block expand() { + incRef(); + return this; + } + @Override public long ramBytesUsed() { return vector.ramBytesUsed(); From f0b61f864f71228c9d123ba190886903493034ef Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 28 Mar 2024 17:31:50 +0100 Subject: [PATCH 09/69] Enable data-streams module in REST tests (#106875) --- qa/smoke-test-multinode/build.gradle | 1 + .../smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java | 1 + rest-api-spec/build.gradle | 1 + .../java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java | 1 + x-pack/qa/core-rest-tests-with-security/build.gradle | 1 + .../xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java | 1 + 6 files changed, 6 insertions(+) diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index f5beef38319e5..fa60e75ce067e 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -19,6 +19,7 @@ dependencies { clusterModules project(":modules:reindex") clusterModules project(":modules:analysis-common") clusterModules project(":modules:health-shards-availability") + clusterModules project(":modules:data-streams") } tasks.named("yamlRestTest").configure { diff --git a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java 
b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index 9afb533b037b4..63ada4c416f03 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -30,6 +30,7 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe .module("reindex") .module("analysis-common") .module("health-shards-availability") + .module("data-streams") // The first node does not have the ingest role so we're sure ingest requests are forwarded: .node(0, n -> n.setting("node.roles", "[master,data,ml,remote_cluster_client,transform]")) .feature(FeatureFlag.TIME_SERIES_MODE) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 787d684c3779e..146c78e3c8471 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -38,6 +38,7 @@ dependencies { clusterModules project(":modules:reindex") clusterModules project(':modules:analysis-common') clusterModules project(':modules:health-shards-availability') + clusterModules project(':modules:data-streams') } tasks.named("yamlRestTestV7CompatTransform").configure { task -> diff --git a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java index 2b3bab21e8ae6..5017ee06c7a8a 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java @@ -33,6 +33,7 @@ public class ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .module("reindex") .module("analysis-common") .module("health-shards-availability") + .module("data-streams") .feature(FeatureFlag.TIME_SERIES_MODE) 
.build(); diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index dda8d6a249bc4..0b8e459ed231b 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -9,6 +9,7 @@ dependencies { clusterModules project(':modules:reindex') clusterModules project(':modules:analysis-common') clusterModules project(':modules:health-shards-availability') + clusterModules project(':modules:data-streams') clusterModules project(xpackModule('stack')) clusterModules project(xpackModule('ilm')) clusterModules project(xpackModule('mapper-constant-keyword')) diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index a0f5ba84fd355..fe62d4e2d2639 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -40,6 +40,7 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .module("wildcard") .module("analysis-common") .module("health-shards-availability") + .module("data-streams") .setting("xpack.security.enabled", "true") .setting("xpack.watcher.enabled", "false") .setting("xpack.ml.enabled", "false") From 9426d8bc1d443cbb89012262bf734d7036c16b70 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 28 Mar 2024 17:48:57 +0100 Subject: [PATCH 10/69] Add a mechanism for zero-IO creation of small blobstore backed `IndexInput` (#106794) There's loads of scenarios where we create very small slices (as in less than buffer size) from input that already have these 
bytes buffered. (BKDReader#packedIndex for example) We can save considerable memory as well as potential IO to disk or worse-yet the blob store by just slicing the buffer if possible. Outside of the case of slicing and never reading from the slice, this should always save memory. --- .../common/BlobCacheBufferedIndexInput.java | 24 +++++++++++++++++++ .../input/DirectBlobContainerIndexInput.java | 4 ++++ .../input/MetadataCachingIndexInput.java | 4 ++++ 3 files changed, 32 insertions(+) diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 8eb7e32bfdd3a..71ee6da24cdc7 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -11,6 +11,8 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.RandomAccessInput; import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; +import org.elasticsearch.core.Nullable; import java.io.EOFException; import java.io.IOException; @@ -319,6 +321,28 @@ public final void seek(long pos) throws IOException { } } + /** + * Try slicing {@code sliceLength} bytes from the given {@code sliceOffset} from the currently buffered. + * If this input's buffer currently contains the sliced range fully, then it is copied to a newly allocated byte array and an array + * backed index input is returned. Using this method will never allocate a byte array larger than the buffer size and will result in + * a potentially more memory efficient {@link IndexInput} than slicing to a new {@link BlobCacheBufferedIndexInput} and will prevent + * any further reads from input that is wrapped by this instance. 
+ * + * @param name slice name + * @param sliceOffset slice offset + * @param sliceLength slice length + * @return a byte array backed index input if slicing directly from the buffer worked or {@code null} otherwise + */ + @Nullable + protected final IndexInput trySliceBuffer(String name, long sliceOffset, long sliceLength) { + if (ByteRange.of(bufferStart, bufferStart + buffer.limit()).contains(sliceOffset, sliceOffset + sliceLength)) { + final byte[] bytes = new byte[(int) sliceLength]; + buffer.get(Math.toIntExact(sliceOffset - bufferStart), bytes, 0, bytes.length); + return new ByteArrayIndexInput(name, bytes); + } + return null; + } + /** * Expert: implements seek. Sets current position in this file, where the next {@link * #readInternal(ByteBuffer)} will occur. diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java index ea85a91677c46..37b5fd5c14a95 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/DirectBlobContainerIndexInput.java @@ -300,6 +300,10 @@ public DirectBlobContainerIndexInput clone() { @Override public IndexInput slice(String sliceName, long offset, long length) throws IOException { BlobCacheUtils.ensureSlice(sliceName, offset, length, this); + var bufferSlice = trySliceBuffer(sliceName, offset, length); + if (bufferSlice != null) { + return bufferSlice; + } final DirectBlobContainerIndexInput slice = new DirectBlobContainerIndexInput( sliceName, blobContainer, diff --git 
a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index e9f4ab11c9b7c..ff8633bdaad17 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -674,6 +674,10 @@ protected String getFullSliceDescription(String sliceDescription) { @Override public IndexInput slice(String sliceName, long sliceOffset, long sliceLength) { + var bufferSlice = trySliceBuffer(sliceName, sliceOffset, sliceLength); + if (bufferSlice != null) { + return bufferSlice; + } BlobCacheUtils.ensureSlice(sliceName, sliceOffset, sliceLength, this); // Are we creating a slice from a CFS file? From 52fcf8142b8525029cb07265fcb4d3b9354dd5a1 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Thu, 28 Mar 2024 18:01:01 +0100 Subject: [PATCH 11/69] Fold ExactFieldName into FieldName (#106867) FieldName does not make much sense as an abstract class with a single private subclass. Also, the base implementation holds most of the fields that the subclass relies on to do its job. 
They can be unified into a single class --- .../SecurityIndexFieldNameTranslator.java | 32 ++++++------------- 1 file changed, 10 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java index 291d55b7b0837..e262454af2958 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java @@ -42,43 +42,31 @@ public static FieldName exact(String name) { } public static FieldName exact(String name, Function translation) { - return new SecurityIndexFieldNameTranslator.ExactFieldName(name, translation); + return new SecurityIndexFieldNameTranslator.FieldName(name, translation); } - public abstract static class FieldName { + public static class FieldName { + private final String name; private final Function toIndexFieldName; protected final Predicate validIndexNamePredicate; - FieldName(Function toIndexFieldName, Predicate validIndexNamePredicate) { + private FieldName(String name, Function toIndexFieldName) { + this.name = name; this.toIndexFieldName = toIndexFieldName; - this.validIndexNamePredicate = validIndexNamePredicate; - } - - public abstract boolean supportsQueryName(String queryFieldName); - - public abstract boolean supportsIndexName(String indexFieldName); + this.validIndexNamePredicate = fieldName -> toIndexFieldName.apply(name).equals(fieldName); - public String indexFieldName(String queryFieldName) { - return toIndexFieldName.apply(queryFieldName); } - } - private static class ExactFieldName extends FieldName { - private final String name; - - private ExactFieldName(String name, Function toIndexFieldName) { - super(toIndexFieldName, fieldName -> 
toIndexFieldName.apply(name).equals(fieldName)); - this.name = name; - } - - @Override public boolean supportsQueryName(String queryFieldName) { return queryFieldName.equals(name); } - @Override public boolean supportsIndexName(String indexFieldName) { return validIndexNamePredicate.test(indexFieldName); } + + public String indexFieldName(String queryFieldName) { + return toIndexFieldName.apply(queryFieldName); + } } } From aeeb5979ed84bfb5a8b92ab34fd2e3714a9e69b3 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Thu, 28 Mar 2024 18:07:36 +0100 Subject: [PATCH 12/69] ESQL: Add OPTIONS clause to FROM command (#106636) This adds an OPTIONS clause to FROM, allowing to specify search or index resolution options, such as: preference, allow_no_indices or ignore_unavailable. --- docs/changelog/106636.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/ccq/MultiClusterSpecIT.java | 2 +- .../xpack/esql/ccq/MultiClustersIT.java | 49 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 80 + .../resources/{id.csv-spec => from.csv-spec} | 15 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 6 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 94 +- .../esql/src/main/antlr/EsqlBaseParser.g4 | 21 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 94 +- .../xpack/esql/analysis/Analyzer.java | 3 +- .../xpack/esql/analysis/PreAnalyzer.java | 15 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 13 +- .../xpack/esql/parser/EsqlBaseLexer.interp | 10 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1465 +++++++------- .../xpack/esql/parser/EsqlBaseParser.interp | 10 +- .../xpack/esql/parser/EsqlBaseParser.java | 1678 +++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 48 +- .../parser/EsqlBaseParserBaseVisitor.java | 28 +- .../esql/parser/EsqlBaseParserListener.java | 40 +- .../esql/parser/EsqlBaseParserVisitor.java | 24 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 17 +- .../plan/logical/EsqlUnresolvedRelation.java | 31 +- 
.../xpack/esql/planner/PlannerUtils.java | 7 + .../xpack/esql/plugin/ComputeService.java | 80 +- .../xpack/esql/plugin/EsqlFeatures.java | 7 +- .../xpack/esql/session/EsqlIndexResolver.java | 15 +- .../xpack/esql/session/EsqlSession.java | 10 +- .../esql/io/stream/PlanNamedTypesTests.java | 38 +- .../esql/parser/StatementParserTests.java | 104 +- .../xpack/ql/options/EsSourceOptions.java | 135 ++ .../xpack/ql/plan/logical/EsRelation.java | 24 +- 32 files changed, 2478 insertions(+), 1691 deletions(-) create mode 100644 docs/changelog/106636.yaml rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{id.csv-spec => from.csv-spec} (84%) create mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/options/EsSourceOptions.java diff --git a/docs/changelog/106636.yaml b/docs/changelog/106636.yaml new file mode 100644 index 0000000000000..e110d98ca577d --- /dev/null +++ b/docs/changelog/106636.yaml @@ -0,0 +1,5 @@ +pr: 106636 +summary: "ESQL: Add OPTIONS clause to FROM command" +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 415e3cf14e3a7..698c879c3f252 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -155,6 +155,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_EMBEDDING_BYTE_ADDED = def(8_615_00_0); public static final TransportVersion ML_INFERENCE_L2_NORM_SIMILARITY_ADDED = def(8_616_00_0); public static final TransportVersion SEARCH_NODE_LOAD_AUTOSCALING = def(8_617_00_0); + public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 0ea445255f0d8..ca084ab26908d 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -192,7 +192,7 @@ static CsvSpecReader.CsvTestCase convertToRemoteIndices(CsvSpecReader.CsvTestCas String first = commands[0].trim(); if (commands[0].toLowerCase(Locale.ROOT).startsWith("from")) { - String[] parts = commands[0].split("(?i)metadata"); + String[] parts = commands[0].split("(?i)(metadata|options)"); assert parts.length >= 1 : parts; String fromStatement = parts[0]; diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java index f79de820ae48d..9a494f6309997 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClustersIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.test.TestClustersThreadFilter; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; import org.junit.After; import org.junit.Before; @@ -27,6 +28,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Optional; import 
java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -42,6 +44,8 @@ public class MultiClustersIT extends ESRestTestCase { @ClassRule public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); + private static TestFeatureService remoteFeaturesService; + @Override protected String getTestRestCluster() { return localCluster.getHttpAddresses(); @@ -152,6 +156,34 @@ public void testCount() throws Exception { } } + public void testCountWithOptions() throws Exception { + assumeTrue("remote cluster requires FROM OPTIONS support", remoteFeaturesService().clusterHasFeature("esql.from_options")); + { + Map result = run( + "FROM test-local-index,*:test-remote-index,doesnotexist " + + "OPTIONS \"ignore_unavailable\"=\"true\",\"preference\"=\"_local\" | STATS c = COUNT(*)" + ); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(localDocs.size() + remoteDocs.size())); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + { + Map result = run( + "FROM *:test-remote-index,doesnotexit OPTIONS \"ignore_unavailable\"=\"true\",\"preference\"=\"_local\" " + + "| STATS c = COUNT(*)" + ); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(remoteDocs.size())); + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + { + Map result = run("FROM *:test-remote-index OPTIONS \"preference\"=\"_shards:999\" | STATS c = COUNT(*)"); + var columns = List.of(Map.of("name", "c", "type", "long")); + var values = List.of(List.of(0)); // shard with id 999 above (non-existent) yields count 0 + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + } + public void testUngroupedAggs() throws Exception { { Map result = run("FROM test-local-index,*:test-remote-index | STATS total = SUM(data)"); @@ -200,4 +232,21 @@ private RestClient 
remoteClusterClient() throws IOException { var clusterHosts = parseClusterHosts(remoteCluster.getHttpAddresses()); return buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[0])); } + + private TestFeatureService remoteFeaturesService() throws IOException { + if (remoteFeaturesService == null) { + try (var remoteFeaturesServiceClient = remoteClusterClient()) { + var remoteNodeVersions = readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var semanticNodeVersions = remoteNodeVersions.stream() + .map(ESRestTestCase::parseLegacyVersion) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); + remoteFeaturesService = createTestFeatureService( + getClusterStateFeatures(remoteFeaturesServiceClient), + semanticNodeVersions + ); + } + } + return remoteFeaturesService; + } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 2d0a39da5a8b4..301b30df5647a 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -16,10 +16,12 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.WarningsHandler; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.logging.LogManager; @@ -215,6 +217,84 @@ public void testUseUnknownIndex() throws IOException { assertThat(e.getMessage(), containsString("Unknown index 
[doesNotExist]")); } + public void testUseKnownIndexWithUnknownIndex() throws IOException { + // to ignore a concrete non-existent index, we need to opt in (which is not the default) + useKnownIndexWithOther("noSuchIndex", "ignore_unavailable"); + } + + public void testUseKnownIndexWithUnknownPattern() throws IOException { + // to not ignore a non-existing index, we need to opt in (which is the default) + useKnownIndexWithOther("noSuchPattern*", "allow_no_indices"); + } + + private void useKnownIndexWithOther(String other, String option) throws IOException { + final int count = randomIntBetween(1, 10); + bulkLoadTestData(count); + + CheckedFunction builder = o -> { + String q = fromIndex() + ',' + other; + q += " OPTIONS \"" + option + "\"=\"" + o + "\""; + q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; + return builder().query(q); + }; + + // test failure + ResponseException e = expectThrows(ResponseException.class, () -> runEsql(builder.apply(false))); + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("no such index [" + other + "]")); + + // test success + assertEquals(expectedTextBody("txt", count, null), runEsqlAsTextWithFormat(builder.apply(true), "txt", null)); + } + + // https://github.com/elastic/elasticsearch/issues/106805 + public void testUseUnknownIndexOnly() { + useUnknownIndex("ignore_unavailable"); + useUnknownIndex("allow_no_indices"); + } + + private void useUnknownIndex(String option) { + CheckedFunction builder = o -> { + String q = "FROM doesnotexist OPTIONS \"" + option + "\"=\"" + o + "\""; + q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; + return builder().query(q); + }; + + // test failure 404 from resolver + ResponseException e = expectThrows(ResponseException.class, () -> runEsql(builder.apply(false))); + assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), 
containsString("index_not_found_exception")); + assertThat(e.getMessage(), containsString("no such index [doesnotexist]")); + + // test failure 400 from verifier + e = expectThrows(ResponseException.class, () -> runEsql(builder.apply(true))); + assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); + assertThat(e.getMessage(), containsString("verification_exception")); + assertThat(e.getMessage(), containsString("Unknown index [doesnotexist]")); + + } + + public void testSearchPreference() throws IOException { + final int count = randomIntBetween(1, 10); + bulkLoadTestData(count); + + CheckedFunction builder = o -> { + String q = fromIndex(); + if (Strings.hasText(o)) { + q += " OPTIONS " + o; + } + q += " | KEEP keyword, integer | SORT integer asc | LIMIT 10"; + return builder().query(q); + }; + + // verify that it returns as expected + assertEquals(expectedTextBody("txt", count, null), runEsqlAsTextWithFormat(builder.apply(null), "txt", null)); + + // returns nothing (0 for count), given the non-existing shard as preference + String option = "\"preference\"=\"_shards:666\""; + assertEquals(expectedTextBody("txt", 0, null), runEsqlAsTextWithFormat(builder.apply(option), "txt", null)); + } + public void testNullInAggs() throws IOException { StringBuilder b = new StringBuilder(); for (int i = 0; i < 1000; i++) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/id.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec similarity index 84% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/id.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index d5e2aa5cc2bcf..76b97d11a7f85 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/id.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -128,3 +128,18 @@ c:l | name:k 1 | ddddd 1 | eeeee ; + +convertFromDatetimeWithOptions +required_feature: esql.from_options + + FROM 
employees OPTIONS "allow_no_indices"="false","preference"="_shards:0" +| SORT emp_no +| EVAL hire_double = to_double(hire_date) +| KEEP emp_no, hire_date, hire_double +| LIMIT 3; + +emp_no:integer |hire_date:date |hire_double:double +10001 |1986-06-26T00:00:00.000Z|5.20128E11 +10002 |1985-11-21T00:00:00.000Z|5.013792E11 +10003 |1986-08-28T00:00:00.000Z|5.255712E11 +; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index bc21a60a76ed8..9bc3d695ee9fa 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -89,7 +89,7 @@ fragment UNQUOTED_ID_BODY : (LETTER | DIGIT | UNDERSCORE) ; -STRING +QUOTED_STRING : '"' (ESCAPE_SEQUENCE | UNESCAPED_CHARS)* '"' | '"""' (~[\r\n])*? '"""' '"'? '"'? ; @@ -186,8 +186,10 @@ FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET); FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET); FROM_COMMA : COMMA -> type(COMMA); FROM_ASSIGN : ASSIGN -> type(ASSIGN); +FROM_QUOTED_STRING : QUOTED_STRING -> type(QUOTED_STRING); -METADATA: 'metadata'; +OPTIONS : 'options'; +METADATA : 'metadata'; fragment FROM_UNQUOTED_IDENTIFIER_PART : ~[=`|,[\]/ \t\r\n] diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 5edc646fad10e..fc02831fc219f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -24,7 +24,7 @@ EXPLAIN_WS=23 EXPLAIN_LINE_COMMENT=24 EXPLAIN_MULTILINE_COMMENT=25 PIPE=26 -STRING=27 +QUOTED_STRING=27 INTEGER_LITERAL=28 DECIMAL_LITERAL=29 BY=30 @@ -68,44 +68,45 @@ QUOTED_IDENTIFIER=67 EXPR_LINE_COMMENT=68 EXPR_MULTILINE_COMMENT=69 EXPR_WS=70 -METADATA=71 -FROM_UNQUOTED_IDENTIFIER=72 -FROM_LINE_COMMENT=73 -FROM_MULTILINE_COMMENT=74 -FROM_WS=75 -ID_PATTERN=76 -PROJECT_LINE_COMMENT=77 -PROJECT_MULTILINE_COMMENT=78 -PROJECT_WS=79 -AS=80 
-RENAME_LINE_COMMENT=81 -RENAME_MULTILINE_COMMENT=82 -RENAME_WS=83 -ON=84 -WITH=85 -ENRICH_POLICY_NAME=86 -ENRICH_LINE_COMMENT=87 -ENRICH_MULTILINE_COMMENT=88 -ENRICH_WS=89 -ENRICH_FIELD_LINE_COMMENT=90 -ENRICH_FIELD_MULTILINE_COMMENT=91 -ENRICH_FIELD_WS=92 -MVEXPAND_LINE_COMMENT=93 -MVEXPAND_MULTILINE_COMMENT=94 -MVEXPAND_WS=95 -INFO=96 -SHOW_LINE_COMMENT=97 -SHOW_MULTILINE_COMMENT=98 -SHOW_WS=99 -FUNCTIONS=100 -META_LINE_COMMENT=101 -META_MULTILINE_COMMENT=102 -META_WS=103 -COLON=104 -SETTING=105 -SETTING_LINE_COMMENT=106 -SETTTING_MULTILINE_COMMENT=107 -SETTING_WS=108 +OPTIONS=71 +METADATA=72 +FROM_UNQUOTED_IDENTIFIER=73 +FROM_LINE_COMMENT=74 +FROM_MULTILINE_COMMENT=75 +FROM_WS=76 +ID_PATTERN=77 +PROJECT_LINE_COMMENT=78 +PROJECT_MULTILINE_COMMENT=79 +PROJECT_WS=80 +AS=81 +RENAME_LINE_COMMENT=82 +RENAME_MULTILINE_COMMENT=83 +RENAME_WS=84 +ON=85 +WITH=86 +ENRICH_POLICY_NAME=87 +ENRICH_LINE_COMMENT=88 +ENRICH_MULTILINE_COMMENT=89 +ENRICH_WS=90 +ENRICH_FIELD_LINE_COMMENT=91 +ENRICH_FIELD_MULTILINE_COMMENT=92 +ENRICH_FIELD_WS=93 +MVEXPAND_LINE_COMMENT=94 +MVEXPAND_MULTILINE_COMMENT=95 +MVEXPAND_WS=96 +INFO=97 +SHOW_LINE_COMMENT=98 +SHOW_MULTILINE_COMMENT=99 +SHOW_WS=100 +FUNCTIONS=101 +META_LINE_COMMENT=102 +META_MULTILINE_COMMENT=103 +META_WS=104 +COLON=105 +SETTING=106 +SETTING_LINE_COMMENT=107 +SETTTING_MULTILINE_COMMENT=108 +SETTING_WS=109 'dissect'=1 'drop'=2 'enrich'=3 @@ -160,10 +161,11 @@ SETTING_WS=108 '/'=62 '%'=63 ']'=65 -'metadata'=71 -'as'=80 -'on'=84 -'with'=85 -'info'=96 -'functions'=100 -':'=104 +'options'=71 +'metadata'=72 +'as'=81 +'on'=85 +'with'=86 +'info'=97 +'functions'=101 +':'=105 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index a7d0097b6aec8..06a15adb3ecbe 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -99,7 +99,20 @@ field ; fromCommand - : FROM fromIdentifier (COMMA fromIdentifier)* 
metadata? + : FROM fromIdentifier (COMMA fromIdentifier)* fromOptions? metadata? + ; + +fromIdentifier + : FROM_UNQUOTED_IDENTIFIER + | QUOTED_IDENTIFIER + ; + +fromOptions + : OPTIONS configOption (COMMA configOption)* + ; + +configOption + : string ASSIGN string ; metadata @@ -127,10 +140,6 @@ inlinestatsCommand : INLINESTATS stats=fields (BY grouping=fields)? ; -fromIdentifier - : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER - ; qualifiedName : identifier (DOT identifier)* @@ -228,7 +237,7 @@ integerValue ; string - : STRING + : QUOTED_STRING ; comparisonOperator diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 5edc646fad10e..fc02831fc219f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -24,7 +24,7 @@ EXPLAIN_WS=23 EXPLAIN_LINE_COMMENT=24 EXPLAIN_MULTILINE_COMMENT=25 PIPE=26 -STRING=27 +QUOTED_STRING=27 INTEGER_LITERAL=28 DECIMAL_LITERAL=29 BY=30 @@ -68,44 +68,45 @@ QUOTED_IDENTIFIER=67 EXPR_LINE_COMMENT=68 EXPR_MULTILINE_COMMENT=69 EXPR_WS=70 -METADATA=71 -FROM_UNQUOTED_IDENTIFIER=72 -FROM_LINE_COMMENT=73 -FROM_MULTILINE_COMMENT=74 -FROM_WS=75 -ID_PATTERN=76 -PROJECT_LINE_COMMENT=77 -PROJECT_MULTILINE_COMMENT=78 -PROJECT_WS=79 -AS=80 -RENAME_LINE_COMMENT=81 -RENAME_MULTILINE_COMMENT=82 -RENAME_WS=83 -ON=84 -WITH=85 -ENRICH_POLICY_NAME=86 -ENRICH_LINE_COMMENT=87 -ENRICH_MULTILINE_COMMENT=88 -ENRICH_WS=89 -ENRICH_FIELD_LINE_COMMENT=90 -ENRICH_FIELD_MULTILINE_COMMENT=91 -ENRICH_FIELD_WS=92 -MVEXPAND_LINE_COMMENT=93 -MVEXPAND_MULTILINE_COMMENT=94 -MVEXPAND_WS=95 -INFO=96 -SHOW_LINE_COMMENT=97 -SHOW_MULTILINE_COMMENT=98 -SHOW_WS=99 -FUNCTIONS=100 -META_LINE_COMMENT=101 -META_MULTILINE_COMMENT=102 -META_WS=103 -COLON=104 -SETTING=105 -SETTING_LINE_COMMENT=106 -SETTTING_MULTILINE_COMMENT=107 -SETTING_WS=108 +OPTIONS=71 +METADATA=72 +FROM_UNQUOTED_IDENTIFIER=73 +FROM_LINE_COMMENT=74 
+FROM_MULTILINE_COMMENT=75 +FROM_WS=76 +ID_PATTERN=77 +PROJECT_LINE_COMMENT=78 +PROJECT_MULTILINE_COMMENT=79 +PROJECT_WS=80 +AS=81 +RENAME_LINE_COMMENT=82 +RENAME_MULTILINE_COMMENT=83 +RENAME_WS=84 +ON=85 +WITH=86 +ENRICH_POLICY_NAME=87 +ENRICH_LINE_COMMENT=88 +ENRICH_MULTILINE_COMMENT=89 +ENRICH_WS=90 +ENRICH_FIELD_LINE_COMMENT=91 +ENRICH_FIELD_MULTILINE_COMMENT=92 +ENRICH_FIELD_WS=93 +MVEXPAND_LINE_COMMENT=94 +MVEXPAND_MULTILINE_COMMENT=95 +MVEXPAND_WS=96 +INFO=97 +SHOW_LINE_COMMENT=98 +SHOW_MULTILINE_COMMENT=99 +SHOW_WS=100 +FUNCTIONS=101 +META_LINE_COMMENT=102 +META_MULTILINE_COMMENT=103 +META_WS=104 +COLON=105 +SETTING=106 +SETTING_LINE_COMMENT=107 +SETTTING_MULTILINE_COMMENT=108 +SETTING_WS=109 'dissect'=1 'drop'=2 'enrich'=3 @@ -160,10 +161,11 @@ SETTING_WS=108 '/'=62 '%'=63 ']'=65 -'metadata'=71 -'as'=80 -'on'=84 -'with'=85 -'info'=96 -'functions'=100 -':'=104 +'options'=71 +'metadata'=72 +'as'=81 +'on'=85 +'with'=86 +'info'=97 +'functions'=101 +':'=105 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 14344502f165a..5c787415a8419 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -153,6 +153,7 @@ protected LogicalPlan rule(EsqlUnresolvedRelation plan, AnalyzerContext context) } TableIdentifier table = plan.table(); if (context.indexResolution().matches(table.index()) == false) { + // TODO: fix this (and tests), or drop check (seems SQL-inherited, where's also defective) new EsqlUnresolvedRelation( plan.source(), plan.table(), @@ -164,7 +165,7 @@ protected LogicalPlan rule(EsqlUnresolvedRelation plan, AnalyzerContext context) EsIndex esIndex = context.indexResolution().get(); var attributes = mappingAsAttributes(plan.source(), esIndex.mapping()); 
attributes.addAll(plan.metadataFields()); - return new EsRelation(plan.source(), esIndex, attributes.isEmpty() ? NO_FIELDS : attributes); + return new EsRelation(plan.source(), esIndex, attributes.isEmpty() ? NO_FIELDS : attributes, plan.esSourceOptions()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index 654ece932e4ed..8e99ec502ff95 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.ql.analyzer.TableInfo; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import java.util.ArrayList; @@ -20,13 +21,15 @@ public class PreAnalyzer { public static class PreAnalysis { - public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList()); + public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList(), emptyList()); public final List indices; + public final List esSourceOptions; public final List enriches; - public PreAnalysis(List indices, List enriches) { + public PreAnalysis(List indices, List esSourceOptions, List enriches) { this.indices = indices; + this.esSourceOptions = esSourceOptions; this.enriches = enriches; } } @@ -41,14 +44,18 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { protected PreAnalysis doPreAnalyze(LogicalPlan plan) { List indices = new ArrayList<>(); + List esSourceOptions = new ArrayList<>(); List unresolvedEnriches = new ArrayList<>(); - plan.forEachUp(EsqlUnresolvedRelation.class, p -> indices.add(new TableInfo(p.table(), p.frozen()))); + 
plan.forEachUp(EsqlUnresolvedRelation.class, p -> { + indices.add(new TableInfo(p.table(), p.frozen())); + esSourceOptions.add(p.esSourceOptions()); + }); plan.forEachUp(Enrich.class, unresolvedEnriches::add); // mark plan as preAnalyzed (if it were marked, there would be no analysis) plan.forEachUp(LogicalPlan::setPreAnalyzed); - return new PreAnalysis(indices, unresolvedEnriches); + return new PreAnalysis(indices, esSourceOptions, unresolvedEnriches); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index d198d740029e1..44e134a0d7aec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -182,6 +182,7 @@ import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; @@ -772,7 +773,14 @@ static void writeDissect(PlanStreamOutput out, Dissect dissect) throws IOExcepti } static EsRelation readEsRelation(PlanStreamInput in) throws IOException { - return new EsRelation(in.readSource(), readEsIndex(in), readAttributes(in), in.readBoolean()); + Source source = in.readSource(); + EsIndex esIndex = readEsIndex(in); + List attributes = readAttributes(in); + EsSourceOptions esSourceOptions = in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_SOURCE_OPTIONS) + ? 
new EsSourceOptions(in) + : EsSourceOptions.NO_OPTIONS; + boolean frozen = in.readBoolean(); + return new EsRelation(source, esIndex, attributes, esSourceOptions, frozen); } static void writeEsRelation(PlanStreamOutput out, EsRelation relation) throws IOException { @@ -780,6 +788,9 @@ static void writeEsRelation(PlanStreamOutput out, EsRelation relation) throws IO out.writeNoSource(); writeEsIndex(out, relation.index()); writeAttributes(out, relation.output()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ES_SOURCE_OPTIONS)) { + relation.esSourceOptions().writeEsSourceOptions(out); + } out.writeBoolean(relation.frozen()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index ed47b27924a92..0a115745c0b23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -70,6 +70,7 @@ null null null null +'options' 'metadata' null null @@ -137,7 +138,7 @@ EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT PIPE -STRING +QUOTED_STRING INTEGER_LITERAL DECIMAL_LITERAL BY @@ -181,6 +182,7 @@ QUOTED_IDENTIFIER EXPR_LINE_COMMENT EXPR_MULTILINE_COMMENT EXPR_WS +OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT @@ -259,7 +261,7 @@ BACKQUOTE BACKQUOTE_BLOCK UNDERSCORE UNQUOTED_ID_BODY -STRING +QUOTED_STRING INTEGER_LITERAL DECIMAL_LITERAL BY @@ -309,6 +311,8 @@ FROM_OPENING_BRACKET FROM_CLOSING_BRACKET FROM_COMMA FROM_ASSIGN +FROM_QUTED_STRING +OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER_PART FROM_UNQUOTED_IDENTIFIER @@ -398,4 +402,4 @@ META_MODE SETTING_MODE atn: -[4, 0, 108, 1182, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 
8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 
144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 478, 8, 18, 11, 18, 12, 18, 479, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 488, 8, 19, 10, 19, 12, 19, 491, 9, 19, 1, 19, 3, 19, 494, 8, 19, 1, 19, 3, 19, 497, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 506, 8, 20, 10, 20, 12, 20, 509, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 517, 8, 21, 11, 21, 12, 21, 518, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 560, 8, 32, 1, 32, 4, 32, 563, 8, 32, 11, 32, 12, 32, 564, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 574, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 581, 8, 37, 1, 38, 1, 
38, 1, 38, 5, 38, 586, 8, 38, 10, 38, 12, 38, 589, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 597, 8, 38, 10, 38, 12, 38, 600, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 607, 8, 38, 1, 38, 3, 38, 610, 8, 38, 3, 38, 612, 8, 38, 1, 39, 4, 39, 615, 8, 39, 11, 39, 12, 39, 616, 1, 40, 4, 40, 620, 8, 40, 11, 40, 12, 40, 621, 1, 40, 1, 40, 5, 40, 626, 8, 40, 10, 40, 12, 40, 629, 9, 40, 1, 40, 1, 40, 4, 40, 633, 8, 40, 11, 40, 12, 40, 634, 1, 40, 4, 40, 638, 8, 40, 11, 40, 12, 40, 639, 1, 40, 1, 40, 5, 40, 644, 8, 40, 10, 40, 12, 40, 647, 9, 40, 3, 40, 649, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 655, 8, 40, 11, 40, 12, 40, 656, 1, 40, 1, 40, 3, 40, 661, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 789, 8, 77, 10, 77, 12, 77, 792, 9, 77, 1, 77, 1, 77, 3, 77, 796, 8, 77, 1, 77, 4, 77, 799, 8, 77, 11, 77, 12, 77, 800, 3, 77, 803, 8, 77, 1, 78, 1, 78, 4, 78, 807, 8, 78, 11, 78, 12, 78, 808, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 
87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 3, 89, 860, 8, 89, 1, 90, 4, 90, 863, 8, 90, 11, 90, 12, 90, 864, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 3, 98, 900, 8, 98, 1, 99, 1, 99, 3, 99, 904, 8, 99, 1, 99, 5, 99, 907, 8, 99, 10, 99, 12, 99, 910, 9, 99, 1, 99, 1, 99, 3, 99, 914, 8, 99, 1, 99, 4, 99, 917, 8, 99, 11, 99, 12, 99, 918, 3, 99, 921, 8, 99, 1, 100, 1, 100, 4, 100, 925, 8, 100, 11, 100, 12, 100, 926, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 4, 118, 1002, 8, 118, 11, 118, 12, 118, 1003, 1, 118, 1, 118, 3, 118, 1008, 8, 118, 1, 118, 4, 118, 1011, 8, 118, 11, 118, 12, 118, 1012, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 
137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 4, 153, 1167, 8, 153, 11, 153, 12, 153, 1168, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 2, 507, 598, 0, 157, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 0, 170, 67, 172, 68, 174, 69, 176, 70, 178, 0, 180, 0, 182, 0, 184, 0, 186, 0, 188, 71, 190, 0, 192, 72, 194, 0, 196, 73, 198, 74, 200, 75, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 76, 214, 77, 216, 78, 218, 79, 220, 0, 222, 0, 224, 0, 226, 0, 228, 80, 230, 0, 232, 81, 234, 82, 236, 83, 238, 0, 240, 0, 242, 84, 244, 85, 246, 0, 248, 86, 250, 0, 252, 0, 254, 87, 256, 88, 258, 89, 260, 0, 262, 0, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 90, 276, 91, 278, 92, 280, 0, 282, 0, 284, 0, 286, 0, 288, 93, 290, 94, 292, 95, 294, 0, 296, 96, 298, 97, 300, 98, 302, 99, 304, 0, 
306, 100, 308, 101, 310, 102, 312, 103, 314, 0, 316, 104, 318, 105, 320, 106, 322, 107, 324, 108, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1209, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 
172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 4, 202, 1, 0, 0, 0, 4, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 5, 220, 1, 0, 0, 0, 5, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 7, 260, 1, 0, 0, 0, 7, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 8, 280, 1, 0, 0, 0, 8, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 9, 294, 1, 0, 0, 0, 9, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 10, 304, 1, 0, 0, 0, 10, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 11, 314, 1, 0, 0, 0, 11, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 12, 326, 1, 0, 0, 0, 14, 336, 1, 0, 0, 0, 16, 343, 1, 0, 0, 0, 18, 352, 1, 0, 0, 0, 20, 359, 1, 0, 0, 0, 22, 369, 1, 0, 0, 0, 24, 376, 1, 0, 0, 0, 26, 383, 1, 0, 0, 0, 28, 397, 1, 0, 0, 0, 30, 404, 1, 0, 0, 0, 32, 412, 1, 0, 0, 0, 34, 419, 1, 0, 0, 0, 36, 431, 1, 0, 0, 0, 38, 440, 1, 0, 0, 0, 40, 446, 1, 0, 0, 0, 42, 453, 1, 0, 0, 0, 44, 460, 1, 0, 0, 0, 46, 468, 1, 0, 0, 0, 48, 477, 1, 0, 0, 0, 50, 483, 1, 0, 0, 0, 52, 500, 1, 0, 0, 0, 54, 516, 1, 0, 0, 0, 56, 522, 1, 0, 0, 0, 58, 527, 1, 0, 0, 0, 60, 532, 1, 0, 0, 0, 62, 
536, 1, 0, 0, 0, 64, 540, 1, 0, 0, 0, 66, 544, 1, 0, 0, 0, 68, 548, 1, 0, 0, 0, 70, 550, 1, 0, 0, 0, 72, 552, 1, 0, 0, 0, 74, 555, 1, 0, 0, 0, 76, 557, 1, 0, 0, 0, 78, 566, 1, 0, 0, 0, 80, 568, 1, 0, 0, 0, 82, 573, 1, 0, 0, 0, 84, 575, 1, 0, 0, 0, 86, 580, 1, 0, 0, 0, 88, 611, 1, 0, 0, 0, 90, 614, 1, 0, 0, 0, 92, 660, 1, 0, 0, 0, 94, 662, 1, 0, 0, 0, 96, 665, 1, 0, 0, 0, 98, 669, 1, 0, 0, 0, 100, 673, 1, 0, 0, 0, 102, 675, 1, 0, 0, 0, 104, 677, 1, 0, 0, 0, 106, 682, 1, 0, 0, 0, 108, 684, 1, 0, 0, 0, 110, 690, 1, 0, 0, 0, 112, 696, 1, 0, 0, 0, 114, 701, 1, 0, 0, 0, 116, 703, 1, 0, 0, 0, 118, 706, 1, 0, 0, 0, 120, 709, 1, 0, 0, 0, 122, 714, 1, 0, 0, 0, 124, 718, 1, 0, 0, 0, 126, 723, 1, 0, 0, 0, 128, 729, 1, 0, 0, 0, 130, 732, 1, 0, 0, 0, 132, 734, 1, 0, 0, 0, 134, 740, 1, 0, 0, 0, 136, 742, 1, 0, 0, 0, 138, 747, 1, 0, 0, 0, 140, 750, 1, 0, 0, 0, 142, 753, 1, 0, 0, 0, 144, 756, 1, 0, 0, 0, 146, 758, 1, 0, 0, 0, 148, 761, 1, 0, 0, 0, 150, 763, 1, 0, 0, 0, 152, 766, 1, 0, 0, 0, 154, 768, 1, 0, 0, 0, 156, 770, 1, 0, 0, 0, 158, 772, 1, 0, 0, 0, 160, 774, 1, 0, 0, 0, 162, 776, 1, 0, 0, 0, 164, 781, 1, 0, 0, 0, 166, 802, 1, 0, 0, 0, 168, 804, 1, 0, 0, 0, 170, 812, 1, 0, 0, 0, 172, 814, 1, 0, 0, 0, 174, 818, 1, 0, 0, 0, 176, 822, 1, 0, 0, 0, 178, 826, 1, 0, 0, 0, 180, 831, 1, 0, 0, 0, 182, 835, 1, 0, 0, 0, 184, 839, 1, 0, 0, 0, 186, 843, 1, 0, 0, 0, 188, 847, 1, 0, 0, 0, 190, 859, 1, 0, 0, 0, 192, 862, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 870, 1, 0, 0, 0, 198, 874, 1, 0, 0, 0, 200, 878, 1, 0, 0, 0, 202, 882, 1, 0, 0, 0, 204, 887, 1, 0, 0, 0, 206, 891, 1, 0, 0, 0, 208, 899, 1, 0, 0, 0, 210, 920, 1, 0, 0, 0, 212, 924, 1, 0, 0, 0, 214, 928, 1, 0, 0, 0, 216, 932, 1, 0, 0, 0, 218, 936, 1, 0, 0, 0, 220, 940, 1, 0, 0, 0, 222, 945, 1, 0, 0, 0, 224, 949, 1, 0, 0, 0, 226, 953, 1, 0, 0, 0, 228, 957, 1, 0, 0, 0, 230, 960, 1, 0, 0, 0, 232, 964, 1, 0, 0, 0, 234, 968, 1, 0, 0, 0, 236, 972, 1, 0, 0, 0, 238, 976, 1, 0, 0, 0, 240, 981, 1, 0, 0, 0, 242, 986, 1, 0, 0, 0, 244, 991, 1, 0, 0, 
0, 246, 998, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1014, 1, 0, 0, 0, 252, 1018, 1, 0, 0, 0, 254, 1022, 1, 0, 0, 0, 256, 1026, 1, 0, 0, 0, 258, 1030, 1, 0, 0, 0, 260, 1034, 1, 0, 0, 0, 262, 1040, 1, 0, 0, 0, 264, 1044, 1, 0, 0, 0, 266, 1048, 1, 0, 0, 0, 268, 1052, 1, 0, 0, 0, 270, 1056, 1, 0, 0, 0, 272, 1060, 1, 0, 0, 0, 274, 1064, 1, 0, 0, 0, 276, 1068, 1, 0, 0, 0, 278, 1072, 1, 0, 0, 0, 280, 1076, 1, 0, 0, 0, 282, 1081, 1, 0, 0, 0, 284, 1085, 1, 0, 0, 0, 286, 1089, 1, 0, 0, 0, 288, 1093, 1, 0, 0, 0, 290, 1097, 1, 0, 0, 0, 292, 1101, 1, 0, 0, 0, 294, 1105, 1, 0, 0, 0, 296, 1110, 1, 0, 0, 0, 298, 1115, 1, 0, 0, 0, 300, 1119, 1, 0, 0, 0, 302, 1123, 1, 0, 0, 0, 304, 1127, 1, 0, 0, 0, 306, 1132, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1146, 1, 0, 0, 0, 312, 1150, 1, 0, 0, 0, 314, 1154, 1, 0, 0, 0, 316, 1159, 1, 0, 0, 0, 318, 1166, 1, 0, 0, 0, 320, 1170, 1, 0, 0, 0, 322, 1174, 1, 0, 0, 0, 324, 1178, 1, 0, 0, 0, 326, 327, 5, 100, 0, 0, 327, 328, 5, 105, 0, 0, 328, 329, 5, 115, 0, 0, 329, 330, 5, 115, 0, 0, 330, 331, 5, 101, 0, 0, 331, 332, 5, 99, 0, 0, 332, 333, 5, 116, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 6, 0, 0, 0, 335, 13, 1, 0, 0, 0, 336, 337, 5, 100, 0, 0, 337, 338, 5, 114, 0, 0, 338, 339, 5, 111, 0, 0, 339, 340, 5, 112, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 1, 1, 0, 342, 15, 1, 0, 0, 0, 343, 344, 5, 101, 0, 0, 344, 345, 5, 110, 0, 0, 345, 346, 5, 114, 0, 0, 346, 347, 5, 105, 0, 0, 347, 348, 5, 99, 0, 0, 348, 349, 5, 104, 0, 0, 349, 350, 1, 0, 0, 0, 350, 351, 6, 2, 2, 0, 351, 17, 1, 0, 0, 0, 352, 353, 5, 101, 0, 0, 353, 354, 5, 118, 0, 0, 354, 355, 5, 97, 0, 0, 355, 356, 5, 108, 0, 0, 356, 357, 1, 0, 0, 0, 357, 358, 6, 3, 0, 0, 358, 19, 1, 0, 0, 0, 359, 360, 5, 101, 0, 0, 360, 361, 5, 120, 0, 0, 361, 362, 5, 112, 0, 0, 362, 363, 5, 108, 0, 0, 363, 364, 5, 97, 0, 0, 364, 365, 5, 105, 0, 0, 365, 366, 5, 110, 0, 0, 366, 367, 1, 0, 0, 0, 367, 368, 6, 4, 3, 0, 368, 21, 1, 0, 0, 0, 369, 370, 5, 102, 0, 0, 370, 371, 5, 114, 0, 0, 371, 372, 5, 111, 0, 0, 372, 373, 5, 
109, 0, 0, 373, 374, 1, 0, 0, 0, 374, 375, 6, 5, 4, 0, 375, 23, 1, 0, 0, 0, 376, 377, 5, 103, 0, 0, 377, 378, 5, 114, 0, 0, 378, 379, 5, 111, 0, 0, 379, 380, 5, 107, 0, 0, 380, 381, 1, 0, 0, 0, 381, 382, 6, 6, 0, 0, 382, 25, 1, 0, 0, 0, 383, 384, 5, 105, 0, 0, 384, 385, 5, 110, 0, 0, 385, 386, 5, 108, 0, 0, 386, 387, 5, 105, 0, 0, 387, 388, 5, 110, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 115, 0, 0, 390, 391, 5, 116, 0, 0, 391, 392, 5, 97, 0, 0, 392, 393, 5, 116, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 7, 0, 0, 396, 27, 1, 0, 0, 0, 397, 398, 5, 107, 0, 0, 398, 399, 5, 101, 0, 0, 399, 400, 5, 101, 0, 0, 400, 401, 5, 112, 0, 0, 401, 402, 1, 0, 0, 0, 402, 403, 6, 8, 1, 0, 403, 29, 1, 0, 0, 0, 404, 405, 5, 108, 0, 0, 405, 406, 5, 105, 0, 0, 406, 407, 5, 109, 0, 0, 407, 408, 5, 105, 0, 0, 408, 409, 5, 116, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 9, 0, 0, 411, 31, 1, 0, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 101, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 5, 97, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 10, 5, 0, 418, 33, 1, 0, 0, 0, 419, 420, 5, 109, 0, 0, 420, 421, 5, 118, 0, 0, 421, 422, 5, 95, 0, 0, 422, 423, 5, 101, 0, 0, 423, 424, 5, 120, 0, 0, 424, 425, 5, 112, 0, 0, 425, 426, 5, 97, 0, 0, 426, 427, 5, 110, 0, 0, 427, 428, 5, 100, 0, 0, 428, 429, 1, 0, 0, 0, 429, 430, 6, 11, 6, 0, 430, 35, 1, 0, 0, 0, 431, 432, 5, 114, 0, 0, 432, 433, 5, 101, 0, 0, 433, 434, 5, 110, 0, 0, 434, 435, 5, 97, 0, 0, 435, 436, 5, 109, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 12, 7, 0, 439, 37, 1, 0, 0, 0, 440, 441, 5, 114, 0, 0, 441, 442, 5, 111, 0, 0, 442, 443, 5, 119, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 13, 0, 0, 445, 39, 1, 0, 0, 0, 446, 447, 5, 115, 0, 0, 447, 448, 5, 104, 0, 0, 448, 449, 5, 111, 0, 0, 449, 450, 5, 119, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 6, 14, 8, 0, 452, 41, 1, 0, 0, 0, 453, 454, 5, 115, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 116, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 6, 15, 0, 
0, 459, 43, 1, 0, 0, 0, 460, 461, 5, 115, 0, 0, 461, 462, 5, 116, 0, 0, 462, 463, 5, 97, 0, 0, 463, 464, 5, 116, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 16, 0, 0, 467, 45, 1, 0, 0, 0, 468, 469, 5, 119, 0, 0, 469, 470, 5, 104, 0, 0, 470, 471, 5, 101, 0, 0, 471, 472, 5, 114, 0, 0, 472, 473, 5, 101, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 6, 17, 0, 0, 475, 47, 1, 0, 0, 0, 476, 478, 8, 0, 0, 0, 477, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 6, 18, 0, 0, 482, 49, 1, 0, 0, 0, 483, 484, 5, 47, 0, 0, 484, 485, 5, 47, 0, 0, 485, 489, 1, 0, 0, 0, 486, 488, 8, 1, 0, 0, 487, 486, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 494, 5, 13, 0, 0, 493, 492, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 496, 1, 0, 0, 0, 495, 497, 5, 10, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 6, 19, 9, 0, 499, 51, 1, 0, 0, 0, 500, 501, 5, 47, 0, 0, 501, 502, 5, 42, 0, 0, 502, 507, 1, 0, 0, 0, 503, 506, 3, 52, 20, 0, 504, 506, 9, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 504, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 510, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 511, 5, 42, 0, 0, 511, 512, 5, 47, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 20, 9, 0, 514, 53, 1, 0, 0, 0, 515, 517, 7, 2, 0, 0, 516, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 516, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 21, 9, 0, 521, 55, 1, 0, 0, 0, 522, 523, 3, 162, 75, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 22, 10, 0, 525, 526, 6, 22, 11, 0, 526, 57, 1, 0, 0, 0, 527, 528, 3, 66, 27, 0, 528, 529, 1, 0, 0, 0, 529, 530, 6, 23, 12, 0, 530, 531, 6, 23, 13, 0, 531, 59, 1, 0, 0, 0, 532, 533, 3, 54, 21, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 24, 9, 0, 535, 61, 1, 0, 0, 0, 536, 537, 3, 50, 19, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 25, 9, 0, 539, 63, 1, 0, 0, 0, 540, 541, 3, 52, 
20, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 26, 9, 0, 543, 65, 1, 0, 0, 0, 544, 545, 5, 124, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 27, 13, 0, 547, 67, 1, 0, 0, 0, 548, 549, 7, 3, 0, 0, 549, 69, 1, 0, 0, 0, 550, 551, 7, 4, 0, 0, 551, 71, 1, 0, 0, 0, 552, 553, 5, 92, 0, 0, 553, 554, 7, 5, 0, 0, 554, 73, 1, 0, 0, 0, 555, 556, 8, 6, 0, 0, 556, 75, 1, 0, 0, 0, 557, 559, 7, 7, 0, 0, 558, 560, 7, 8, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 562, 1, 0, 0, 0, 561, 563, 3, 68, 28, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 77, 1, 0, 0, 0, 566, 567, 5, 64, 0, 0, 567, 79, 1, 0, 0, 0, 568, 569, 5, 96, 0, 0, 569, 81, 1, 0, 0, 0, 570, 574, 8, 9, 0, 0, 571, 572, 5, 96, 0, 0, 572, 574, 5, 96, 0, 0, 573, 570, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 83, 1, 0, 0, 0, 575, 576, 5, 95, 0, 0, 576, 85, 1, 0, 0, 0, 577, 581, 3, 70, 29, 0, 578, 581, 3, 68, 28, 0, 579, 581, 3, 84, 36, 0, 580, 577, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 579, 1, 0, 0, 0, 581, 87, 1, 0, 0, 0, 582, 587, 5, 34, 0, 0, 583, 586, 3, 72, 30, 0, 584, 586, 3, 74, 31, 0, 585, 583, 1, 0, 0, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 590, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 590, 612, 5, 34, 0, 0, 591, 592, 5, 34, 0, 0, 592, 593, 5, 34, 0, 0, 593, 594, 5, 34, 0, 0, 594, 598, 1, 0, 0, 0, 595, 597, 8, 1, 0, 0, 596, 595, 1, 0, 0, 0, 597, 600, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 598, 596, 1, 0, 0, 0, 599, 601, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 601, 602, 5, 34, 0, 0, 602, 603, 5, 34, 0, 0, 603, 604, 5, 34, 0, 0, 604, 606, 1, 0, 0, 0, 605, 607, 5, 34, 0, 0, 606, 605, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 610, 5, 34, 0, 0, 609, 608, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 582, 1, 0, 0, 0, 611, 591, 1, 0, 0, 0, 612, 89, 1, 0, 0, 0, 613, 615, 3, 68, 28, 0, 614, 613, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 614, 1, 0, 0, 0, 616, 617, 1, 0, 0, 0, 617, 91, 1, 0, 0, 0, 618, 620, 3, 
68, 28, 0, 619, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 627, 3, 106, 47, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 629, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 661, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 632, 3, 106, 47, 0, 631, 633, 3, 68, 28, 0, 632, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 632, 1, 0, 0, 0, 634, 635, 1, 0, 0, 0, 635, 661, 1, 0, 0, 0, 636, 638, 3, 68, 28, 0, 637, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 648, 1, 0, 0, 0, 641, 645, 3, 106, 47, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 649, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 641, 1, 0, 0, 0, 648, 649, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 3, 76, 32, 0, 651, 661, 1, 0, 0, 0, 652, 654, 3, 106, 47, 0, 653, 655, 3, 68, 28, 0, 654, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 1, 0, 0, 0, 658, 659, 3, 76, 32, 0, 659, 661, 1, 0, 0, 0, 660, 619, 1, 0, 0, 0, 660, 630, 1, 0, 0, 0, 660, 637, 1, 0, 0, 0, 660, 652, 1, 0, 0, 0, 661, 93, 1, 0, 0, 0, 662, 663, 5, 98, 0, 0, 663, 664, 5, 121, 0, 0, 664, 95, 1, 0, 0, 0, 665, 666, 5, 97, 0, 0, 666, 667, 5, 110, 0, 0, 667, 668, 5, 100, 0, 0, 668, 97, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 115, 0, 0, 671, 672, 5, 99, 0, 0, 672, 99, 1, 0, 0, 0, 673, 674, 5, 61, 0, 0, 674, 101, 1, 0, 0, 0, 675, 676, 5, 44, 0, 0, 676, 103, 1, 0, 0, 0, 677, 678, 5, 100, 0, 0, 678, 679, 5, 101, 0, 0, 679, 680, 5, 115, 0, 0, 680, 681, 5, 99, 0, 0, 681, 105, 1, 0, 0, 0, 682, 683, 5, 46, 0, 0, 683, 107, 1, 0, 0, 0, 684, 685, 5, 102, 0, 0, 685, 686, 5, 97, 0, 0, 686, 687, 5, 108, 0, 0, 687, 688, 5, 115, 0, 0, 688, 689, 5, 101, 0, 0, 689, 109, 1, 0, 0, 0, 690, 691, 5, 102, 0, 0, 691, 692, 5, 105, 0, 0, 692, 693, 5, 114, 0, 0, 693, 694, 5, 115, 0, 0, 694, 695, 5, 116, 0, 0, 695, 111, 1, 0, 0, 0, 696, 697, 
5, 108, 0, 0, 697, 698, 5, 97, 0, 0, 698, 699, 5, 115, 0, 0, 699, 700, 5, 116, 0, 0, 700, 113, 1, 0, 0, 0, 701, 702, 5, 40, 0, 0, 702, 115, 1, 0, 0, 0, 703, 704, 5, 105, 0, 0, 704, 705, 5, 110, 0, 0, 705, 117, 1, 0, 0, 0, 706, 707, 5, 105, 0, 0, 707, 708, 5, 115, 0, 0, 708, 119, 1, 0, 0, 0, 709, 710, 5, 108, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 107, 0, 0, 712, 713, 5, 101, 0, 0, 713, 121, 1, 0, 0, 0, 714, 715, 5, 110, 0, 0, 715, 716, 5, 111, 0, 0, 716, 717, 5, 116, 0, 0, 717, 123, 1, 0, 0, 0, 718, 719, 5, 110, 0, 0, 719, 720, 5, 117, 0, 0, 720, 721, 5, 108, 0, 0, 721, 722, 5, 108, 0, 0, 722, 125, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 117, 0, 0, 725, 726, 5, 108, 0, 0, 726, 727, 5, 108, 0, 0, 727, 728, 5, 115, 0, 0, 728, 127, 1, 0, 0, 0, 729, 730, 5, 111, 0, 0, 730, 731, 5, 114, 0, 0, 731, 129, 1, 0, 0, 0, 732, 733, 5, 63, 0, 0, 733, 131, 1, 0, 0, 0, 734, 735, 5, 114, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 105, 0, 0, 737, 738, 5, 107, 0, 0, 738, 739, 5, 101, 0, 0, 739, 133, 1, 0, 0, 0, 740, 741, 5, 41, 0, 0, 741, 135, 1, 0, 0, 0, 742, 743, 5, 116, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 117, 0, 0, 745, 746, 5, 101, 0, 0, 746, 137, 1, 0, 0, 0, 747, 748, 5, 61, 0, 0, 748, 749, 5, 61, 0, 0, 749, 139, 1, 0, 0, 0, 750, 751, 5, 61, 0, 0, 751, 752, 5, 126, 0, 0, 752, 141, 1, 0, 0, 0, 753, 754, 5, 33, 0, 0, 754, 755, 5, 61, 0, 0, 755, 143, 1, 0, 0, 0, 756, 757, 5, 60, 0, 0, 757, 145, 1, 0, 0, 0, 758, 759, 5, 60, 0, 0, 759, 760, 5, 61, 0, 0, 760, 147, 1, 0, 0, 0, 761, 762, 5, 62, 0, 0, 762, 149, 1, 0, 0, 0, 763, 764, 5, 62, 0, 0, 764, 765, 5, 61, 0, 0, 765, 151, 1, 0, 0, 0, 766, 767, 5, 43, 0, 0, 767, 153, 1, 0, 0, 0, 768, 769, 5, 45, 0, 0, 769, 155, 1, 0, 0, 0, 770, 771, 5, 42, 0, 0, 771, 157, 1, 0, 0, 0, 772, 773, 5, 47, 0, 0, 773, 159, 1, 0, 0, 0, 774, 775, 5, 37, 0, 0, 775, 161, 1, 0, 0, 0, 776, 777, 5, 91, 0, 0, 777, 778, 1, 0, 0, 0, 778, 779, 6, 75, 0, 0, 779, 780, 6, 75, 0, 0, 780, 163, 1, 0, 0, 0, 781, 782, 5, 93, 0, 0, 782, 783, 1, 0, 0, 0, 
783, 784, 6, 76, 13, 0, 784, 785, 6, 76, 13, 0, 785, 165, 1, 0, 0, 0, 786, 790, 3, 70, 29, 0, 787, 789, 3, 86, 37, 0, 788, 787, 1, 0, 0, 0, 789, 792, 1, 0, 0, 0, 790, 788, 1, 0, 0, 0, 790, 791, 1, 0, 0, 0, 791, 803, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 793, 796, 3, 84, 36, 0, 794, 796, 3, 78, 33, 0, 795, 793, 1, 0, 0, 0, 795, 794, 1, 0, 0, 0, 796, 798, 1, 0, 0, 0, 797, 799, 3, 86, 37, 0, 798, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 798, 1, 0, 0, 0, 800, 801, 1, 0, 0, 0, 801, 803, 1, 0, 0, 0, 802, 786, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 803, 167, 1, 0, 0, 0, 804, 806, 3, 80, 34, 0, 805, 807, 3, 82, 35, 0, 806, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 811, 3, 80, 34, 0, 811, 169, 1, 0, 0, 0, 812, 813, 3, 168, 78, 0, 813, 171, 1, 0, 0, 0, 814, 815, 3, 50, 19, 0, 815, 816, 1, 0, 0, 0, 816, 817, 6, 80, 9, 0, 817, 173, 1, 0, 0, 0, 818, 819, 3, 52, 20, 0, 819, 820, 1, 0, 0, 0, 820, 821, 6, 81, 9, 0, 821, 175, 1, 0, 0, 0, 822, 823, 3, 54, 21, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 82, 9, 0, 825, 177, 1, 0, 0, 0, 826, 827, 3, 66, 27, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 83, 12, 0, 829, 830, 6, 83, 13, 0, 830, 179, 1, 0, 0, 0, 831, 832, 3, 162, 75, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 84, 10, 0, 834, 181, 1, 0, 0, 0, 835, 836, 3, 164, 76, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 85, 14, 0, 838, 183, 1, 0, 0, 0, 839, 840, 3, 102, 45, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 86, 15, 0, 842, 185, 1, 0, 0, 0, 843, 844, 3, 100, 44, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 87, 16, 0, 846, 187, 1, 0, 0, 0, 847, 848, 5, 109, 0, 0, 848, 849, 5, 101, 0, 0, 849, 850, 5, 116, 0, 0, 850, 851, 5, 97, 0, 0, 851, 852, 5, 100, 0, 0, 852, 853, 5, 97, 0, 0, 853, 854, 5, 116, 0, 0, 854, 855, 5, 97, 0, 0, 855, 189, 1, 0, 0, 0, 856, 860, 8, 10, 0, 0, 857, 858, 5, 47, 0, 0, 858, 860, 8, 11, 0, 0, 859, 856, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 860, 191, 1, 0, 0, 0, 861, 863, 3, 190, 89, 0, 862, 861, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 
862, 1, 0, 0, 0, 864, 865, 1, 0, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 3, 170, 79, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 91, 17, 0, 869, 195, 1, 0, 0, 0, 870, 871, 3, 50, 19, 0, 871, 872, 1, 0, 0, 0, 872, 873, 6, 92, 9, 0, 873, 197, 1, 0, 0, 0, 874, 875, 3, 52, 20, 0, 875, 876, 1, 0, 0, 0, 876, 877, 6, 93, 9, 0, 877, 199, 1, 0, 0, 0, 878, 879, 3, 54, 21, 0, 879, 880, 1, 0, 0, 0, 880, 881, 6, 94, 9, 0, 881, 201, 1, 0, 0, 0, 882, 883, 3, 66, 27, 0, 883, 884, 1, 0, 0, 0, 884, 885, 6, 95, 12, 0, 885, 886, 6, 95, 13, 0, 886, 203, 1, 0, 0, 0, 887, 888, 3, 106, 47, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 96, 18, 0, 890, 205, 1, 0, 0, 0, 891, 892, 3, 102, 45, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 97, 15, 0, 894, 207, 1, 0, 0, 0, 895, 900, 3, 70, 29, 0, 896, 900, 3, 68, 28, 0, 897, 900, 3, 84, 36, 0, 898, 900, 3, 156, 72, 0, 899, 895, 1, 0, 0, 0, 899, 896, 1, 0, 0, 0, 899, 897, 1, 0, 0, 0, 899, 898, 1, 0, 0, 0, 900, 209, 1, 0, 0, 0, 901, 904, 3, 70, 29, 0, 902, 904, 3, 156, 72, 0, 903, 901, 1, 0, 0, 0, 903, 902, 1, 0, 0, 0, 904, 908, 1, 0, 0, 0, 905, 907, 3, 208, 98, 0, 906, 905, 1, 0, 0, 0, 907, 910, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 921, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 911, 914, 3, 84, 36, 0, 912, 914, 3, 78, 33, 0, 913, 911, 1, 0, 0, 0, 913, 912, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 917, 3, 208, 98, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 921, 1, 0, 0, 0, 920, 903, 1, 0, 0, 0, 920, 913, 1, 0, 0, 0, 921, 211, 1, 0, 0, 0, 922, 925, 3, 210, 99, 0, 923, 925, 3, 168, 78, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 213, 1, 0, 0, 0, 928, 929, 3, 50, 19, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 101, 9, 0, 931, 215, 1, 0, 0, 0, 932, 933, 3, 52, 20, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 102, 9, 0, 935, 217, 1, 0, 0, 0, 936, 937, 3, 54, 21, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 103, 9, 0, 939, 219, 1, 0, 0, 0, 940, 941, 3, 66, 
27, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 104, 12, 0, 943, 944, 6, 104, 13, 0, 944, 221, 1, 0, 0, 0, 945, 946, 3, 100, 44, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 105, 16, 0, 948, 223, 1, 0, 0, 0, 949, 950, 3, 102, 45, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 106, 15, 0, 952, 225, 1, 0, 0, 0, 953, 954, 3, 106, 47, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 107, 18, 0, 956, 227, 1, 0, 0, 0, 957, 958, 5, 97, 0, 0, 958, 959, 5, 115, 0, 0, 959, 229, 1, 0, 0, 0, 960, 961, 3, 212, 100, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 109, 19, 0, 963, 231, 1, 0, 0, 0, 964, 965, 3, 50, 19, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 110, 9, 0, 967, 233, 1, 0, 0, 0, 968, 969, 3, 52, 20, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 111, 9, 0, 971, 235, 1, 0, 0, 0, 972, 973, 3, 54, 21, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 112, 9, 0, 975, 237, 1, 0, 0, 0, 976, 977, 3, 66, 27, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 113, 12, 0, 979, 980, 6, 113, 13, 0, 980, 239, 1, 0, 0, 0, 981, 982, 3, 162, 75, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 114, 10, 0, 984, 985, 6, 114, 20, 0, 985, 241, 1, 0, 0, 0, 986, 987, 5, 111, 0, 0, 987, 988, 5, 110, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 115, 21, 0, 990, 243, 1, 0, 0, 0, 991, 992, 5, 119, 0, 0, 992, 993, 5, 105, 0, 0, 993, 994, 5, 116, 0, 0, 994, 995, 5, 104, 0, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 116, 21, 0, 997, 245, 1, 0, 0, 0, 998, 999, 8, 12, 0, 0, 999, 247, 1, 0, 0, 0, 1000, 1002, 3, 246, 117, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 3, 316, 152, 0, 1006, 1008, 1, 0, 0, 0, 1007, 1001, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1010, 1, 0, 0, 0, 1009, 1011, 3, 246, 117, 0, 1010, 1009, 1, 0, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1010, 1, 0, 0, 0, 1012, 1013, 1, 0, 0, 0, 1013, 249, 1, 0, 0, 0, 1014, 1015, 3, 170, 79, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 119, 17, 0, 1017, 251, 1, 0, 0, 0, 1018, 1019, 3, 248, 118, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 120, 22, 0, 1021, 253, 1, 
0, 0, 0, 1022, 1023, 3, 50, 19, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 121, 9, 0, 1025, 255, 1, 0, 0, 0, 1026, 1027, 3, 52, 20, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 122, 9, 0, 1029, 257, 1, 0, 0, 0, 1030, 1031, 3, 54, 21, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 123, 9, 0, 1033, 259, 1, 0, 0, 0, 1034, 1035, 3, 66, 27, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 124, 12, 0, 1037, 1038, 6, 124, 13, 0, 1038, 1039, 6, 124, 13, 0, 1039, 261, 1, 0, 0, 0, 1040, 1041, 3, 100, 44, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 125, 16, 0, 1043, 263, 1, 0, 0, 0, 1044, 1045, 3, 102, 45, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 126, 15, 0, 1047, 265, 1, 0, 0, 0, 1048, 1049, 3, 106, 47, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 127, 18, 0, 1051, 267, 1, 0, 0, 0, 1052, 1053, 3, 244, 116, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 128, 23, 0, 1055, 269, 1, 0, 0, 0, 1056, 1057, 3, 212, 100, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 129, 19, 0, 1059, 271, 1, 0, 0, 0, 1060, 1061, 3, 170, 79, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 130, 17, 0, 1063, 273, 1, 0, 0, 0, 1064, 1065, 3, 50, 19, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 131, 9, 0, 1067, 275, 1, 0, 0, 0, 1068, 1069, 3, 52, 20, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 132, 9, 0, 1071, 277, 1, 0, 0, 0, 1072, 1073, 3, 54, 21, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 133, 9, 0, 1075, 279, 1, 0, 0, 0, 1076, 1077, 3, 66, 27, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 134, 12, 0, 1079, 1080, 6, 134, 13, 0, 1080, 281, 1, 0, 0, 0, 1081, 1082, 3, 106, 47, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 135, 18, 0, 1084, 283, 1, 0, 0, 0, 1085, 1086, 3, 170, 79, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 136, 17, 0, 1088, 285, 1, 0, 0, 0, 1089, 1090, 3, 166, 77, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 137, 24, 0, 1092, 287, 1, 0, 0, 0, 1093, 1094, 3, 50, 19, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 138, 9, 0, 1096, 289, 1, 0, 0, 0, 1097, 1098, 3, 52, 20, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 139, 9, 0, 1100, 291, 1, 0, 0, 0, 1101, 
1102, 3, 54, 21, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 140, 9, 0, 1104, 293, 1, 0, 0, 0, 1105, 1106, 3, 66, 27, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 141, 12, 0, 1108, 1109, 6, 141, 13, 0, 1109, 295, 1, 0, 0, 0, 1110, 1111, 5, 105, 0, 0, 1111, 1112, 5, 110, 0, 0, 1112, 1113, 5, 102, 0, 0, 1113, 1114, 5, 111, 0, 0, 1114, 297, 1, 0, 0, 0, 1115, 1116, 3, 50, 19, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 143, 9, 0, 1118, 299, 1, 0, 0, 0, 1119, 1120, 3, 52, 20, 0, 1120, 1121, 1, 0, 0, 0, 1121, 1122, 6, 144, 9, 0, 1122, 301, 1, 0, 0, 0, 1123, 1124, 3, 54, 21, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 145, 9, 0, 1126, 303, 1, 0, 0, 0, 1127, 1128, 3, 66, 27, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 146, 12, 0, 1130, 1131, 6, 146, 13, 0, 1131, 305, 1, 0, 0, 0, 1132, 1133, 5, 102, 0, 0, 1133, 1134, 5, 117, 0, 0, 1134, 1135, 5, 110, 0, 0, 1135, 1136, 5, 99, 0, 0, 1136, 1137, 5, 116, 0, 0, 1137, 1138, 5, 105, 0, 0, 1138, 1139, 5, 111, 0, 0, 1139, 1140, 5, 110, 0, 0, 1140, 1141, 5, 115, 0, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 50, 19, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 9, 0, 1145, 309, 1, 0, 0, 0, 1146, 1147, 3, 52, 20, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 149, 9, 0, 1149, 311, 1, 0, 0, 0, 1150, 1151, 3, 54, 21, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 150, 9, 0, 1153, 313, 1, 0, 0, 0, 1154, 1155, 3, 164, 76, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 151, 14, 0, 1157, 1158, 6, 151, 13, 0, 1158, 315, 1, 0, 0, 0, 1159, 1160, 5, 58, 0, 0, 1160, 317, 1, 0, 0, 0, 1161, 1167, 3, 78, 33, 0, 1162, 1167, 3, 68, 28, 0, 1163, 1167, 3, 106, 47, 0, 1164, 1167, 3, 70, 29, 0, 1165, 1167, 3, 84, 36, 0, 1166, 1161, 1, 0, 0, 0, 1166, 1162, 1, 0, 0, 0, 1166, 1163, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1165, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1166, 1, 0, 0, 0, 1168, 1169, 1, 0, 0, 0, 1169, 319, 1, 0, 0, 0, 1170, 1171, 3, 50, 19, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 154, 9, 0, 1173, 321, 1, 0, 0, 0, 1174, 1175, 3, 52, 20, 0, 1175, 1176, 1, 0, 0, 0, 
1176, 1177, 6, 155, 9, 0, 1177, 323, 1, 0, 0, 0, 1178, 1179, 3, 54, 21, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 156, 9, 0, 1181, 325, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 479, 489, 493, 496, 505, 507, 518, 559, 564, 573, 580, 585, 587, 598, 606, 609, 611, 616, 621, 627, 634, 639, 645, 648, 656, 660, 790, 795, 800, 802, 808, 859, 864, 899, 903, 908, 913, 918, 920, 924, 926, 1003, 1007, 1012, 1166, 1168, 25, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 67, 0, 7, 36, 0, 7, 76, 0, 5, 11, 0, 5, 7, 0, 7, 86, 0, 7, 85, 0, 7, 66, 0] \ No newline at end of file +[4, 0, 109, 1198, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 
87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 
16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 510, 8, 20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 
1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 793, 8, 77, 10, 77, 12, 77, 796, 9, 77, 1, 77, 1, 77, 3, 77, 800, 8, 77, 1, 77, 4, 77, 803, 8, 77, 11, 77, 12, 77, 804, 3, 77, 807, 8, 77, 1, 78, 1, 78, 4, 78, 811, 8, 78, 11, 78, 12, 78, 812, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 3, 91, 876, 8, 91, 1, 92, 4, 92, 879, 8, 92, 11, 92, 12, 92, 880, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 916, 8, 100, 1, 101, 1, 101, 3, 101, 920, 8, 101, 1, 101, 5, 101, 923, 8, 101, 10, 101, 12, 101, 926, 9, 101, 1, 101, 1, 101, 3, 101, 930, 8, 101, 1, 101, 4, 101, 933, 8, 101, 11, 101, 12, 101, 934, 3, 101, 937, 8, 101, 1, 102, 1, 102, 4, 102, 941, 8, 102, 11, 102, 12, 102, 942, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 
112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1018, 8, 120, 11, 120, 12, 120, 1019, 1, 120, 1, 120, 3, 120, 1024, 8, 120, 1, 120, 4, 120, 1027, 8, 120, 11, 120, 12, 120, 1028, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1183, 8, 155, 11, 155, 12, 155, 1184, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 
62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 0, 170, 67, 172, 68, 174, 69, 176, 70, 178, 0, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 71, 192, 72, 194, 0, 196, 73, 198, 0, 200, 74, 202, 75, 204, 76, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 77, 218, 78, 220, 79, 222, 80, 224, 0, 226, 0, 228, 0, 230, 0, 232, 81, 234, 0, 236, 82, 238, 83, 240, 84, 242, 0, 244, 0, 246, 85, 248, 86, 250, 0, 252, 87, 254, 0, 256, 0, 258, 88, 260, 89, 262, 90, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 91, 280, 92, 282, 93, 284, 0, 286, 0, 288, 0, 290, 0, 292, 94, 294, 95, 296, 96, 298, 0, 300, 97, 302, 98, 304, 99, 306, 100, 308, 0, 310, 101, 312, 102, 314, 103, 316, 104, 318, 0, 320, 105, 322, 106, 324, 107, 326, 108, 328, 109, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1225, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 
0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 
0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 681, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 688, 1, 0, 0, 0, 110, 694, 1, 0, 0, 0, 112, 700, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 707, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 722, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 733, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 738, 1, 0, 0, 0, 134, 744, 1, 0, 0, 0, 136, 746, 1, 0, 0, 0, 138, 751, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 762, 1, 
0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 767, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 772, 1, 0, 0, 0, 156, 774, 1, 0, 0, 0, 158, 776, 1, 0, 0, 0, 160, 778, 1, 0, 0, 0, 162, 780, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 806, 1, 0, 0, 0, 168, 808, 1, 0, 0, 0, 170, 816, 1, 0, 0, 0, 172, 818, 1, 0, 0, 0, 174, 822, 1, 0, 0, 0, 176, 826, 1, 0, 0, 0, 178, 830, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 839, 1, 0, 0, 0, 184, 843, 1, 0, 0, 0, 186, 847, 1, 0, 0, 0, 188, 851, 1, 0, 0, 0, 190, 855, 1, 0, 0, 0, 192, 863, 1, 0, 0, 0, 194, 875, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 882, 1, 0, 0, 0, 200, 886, 1, 0, 0, 0, 202, 890, 1, 0, 0, 0, 204, 894, 1, 0, 0, 0, 206, 898, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 907, 1, 0, 0, 0, 212, 915, 1, 0, 0, 0, 214, 936, 1, 0, 0, 0, 216, 940, 1, 0, 0, 0, 218, 944, 1, 0, 0, 0, 220, 948, 1, 0, 0, 0, 222, 952, 1, 0, 0, 0, 224, 956, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 965, 1, 0, 0, 0, 230, 969, 1, 0, 0, 0, 232, 973, 1, 0, 0, 0, 234, 976, 1, 0, 0, 0, 236, 980, 1, 0, 0, 0, 238, 984, 1, 0, 0, 0, 240, 988, 1, 0, 0, 0, 242, 992, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1014, 1, 0, 0, 0, 252, 1023, 1, 0, 0, 0, 254, 1030, 1, 0, 0, 0, 256, 1034, 1, 0, 0, 0, 258, 1038, 1, 0, 0, 0, 260, 1042, 1, 0, 0, 0, 262, 1046, 1, 0, 0, 0, 264, 1050, 1, 0, 0, 0, 266, 1056, 1, 0, 0, 0, 268, 1060, 1, 0, 0, 0, 270, 1064, 1, 0, 0, 0, 272, 1068, 1, 0, 0, 0, 274, 1072, 1, 0, 0, 0, 276, 1076, 1, 0, 0, 0, 278, 1080, 1, 0, 0, 0, 280, 1084, 1, 0, 0, 0, 282, 1088, 1, 0, 0, 0, 284, 1092, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1101, 1, 0, 0, 0, 290, 1105, 1, 0, 0, 0, 292, 1109, 1, 0, 0, 0, 294, 1113, 1, 0, 0, 0, 296, 1117, 1, 0, 0, 0, 298, 1121, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1135, 1, 0, 0, 0, 306, 1139, 1, 0, 0, 0, 308, 1143, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1158, 1, 0, 0, 0, 314, 1162, 1, 0, 0, 0, 316, 1166, 1, 0, 0, 0, 318, 1170, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1182, 1, 0, 0, 0, 324, 1186, 1, 
0, 0, 0, 326, 1190, 1, 0, 0, 0, 328, 1194, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 342, 343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 
0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 5, 112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 
500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 162, 75, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 
0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 106, 47, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 106, 47, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 106, 47, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 
0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 106, 47, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 44, 0, 0, 680, 103, 1, 0, 0, 0, 681, 682, 5, 100, 0, 0, 682, 683, 5, 101, 0, 0, 683, 684, 5, 115, 0, 0, 684, 685, 5, 99, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 46, 0, 0, 687, 107, 1, 0, 0, 0, 688, 689, 5, 102, 0, 0, 689, 690, 5, 97, 0, 0, 690, 691, 5, 108, 0, 0, 691, 692, 5, 115, 0, 0, 692, 693, 5, 101, 0, 0, 693, 109, 1, 0, 0, 0, 694, 695, 5, 102, 0, 0, 695, 696, 5, 105, 0, 0, 696, 697, 5, 114, 0, 0, 697, 698, 5, 115, 0, 0, 698, 699, 5, 116, 0, 0, 699, 111, 1, 0, 0, 0, 700, 701, 5, 108, 0, 0, 701, 702, 5, 97, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 40, 0, 0, 706, 115, 1, 0, 0, 0, 707, 708, 5, 105, 0, 0, 708, 709, 5, 110, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 115, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 108, 0, 0, 714, 715, 5, 105, 0, 0, 715, 716, 5, 107, 0, 0, 716, 717, 5, 101, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 110, 0, 0, 719, 720, 5, 111, 0, 0, 720, 721, 5, 116, 0, 0, 721, 123, 1, 0, 0, 0, 722, 723, 5, 110, 0, 0, 723, 724, 5, 117, 0, 0, 724, 725, 5, 108, 0, 0, 725, 726, 5, 108, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 732, 5, 115, 0, 0, 732, 127, 1, 0, 0, 0, 733, 734, 5, 111, 0, 0, 734, 735, 5, 114, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 63, 0, 0, 737, 
131, 1, 0, 0, 0, 738, 739, 5, 114, 0, 0, 739, 740, 5, 108, 0, 0, 740, 741, 5, 105, 0, 0, 741, 742, 5, 107, 0, 0, 742, 743, 5, 101, 0, 0, 743, 133, 1, 0, 0, 0, 744, 745, 5, 41, 0, 0, 745, 135, 1, 0, 0, 0, 746, 747, 5, 116, 0, 0, 747, 748, 5, 114, 0, 0, 748, 749, 5, 117, 0, 0, 749, 750, 5, 101, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 61, 0, 0, 752, 753, 5, 61, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 126, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 33, 0, 0, 758, 759, 5, 61, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 60, 0, 0, 761, 145, 1, 0, 0, 0, 762, 763, 5, 60, 0, 0, 763, 764, 5, 61, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 62, 0, 0, 766, 149, 1, 0, 0, 0, 767, 768, 5, 62, 0, 0, 768, 769, 5, 61, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 43, 0, 0, 771, 153, 1, 0, 0, 0, 772, 773, 5, 45, 0, 0, 773, 155, 1, 0, 0, 0, 774, 775, 5, 42, 0, 0, 775, 157, 1, 0, 0, 0, 776, 777, 5, 47, 0, 0, 777, 159, 1, 0, 0, 0, 778, 779, 5, 37, 0, 0, 779, 161, 1, 0, 0, 0, 780, 781, 5, 91, 0, 0, 781, 782, 1, 0, 0, 0, 782, 783, 6, 75, 0, 0, 783, 784, 6, 75, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 93, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 13, 0, 788, 789, 6, 76, 13, 0, 789, 165, 1, 0, 0, 0, 790, 794, 3, 70, 29, 0, 791, 793, 3, 86, 37, 0, 792, 791, 1, 0, 0, 0, 793, 796, 1, 0, 0, 0, 794, 792, 1, 0, 0, 0, 794, 795, 1, 0, 0, 0, 795, 807, 1, 0, 0, 0, 796, 794, 1, 0, 0, 0, 797, 800, 3, 84, 36, 0, 798, 800, 3, 78, 33, 0, 799, 797, 1, 0, 0, 0, 799, 798, 1, 0, 0, 0, 800, 802, 1, 0, 0, 0, 801, 803, 3, 86, 37, 0, 802, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 802, 1, 0, 0, 0, 804, 805, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 790, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 807, 167, 1, 0, 0, 0, 808, 810, 3, 80, 34, 0, 809, 811, 3, 82, 35, 0, 810, 809, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 810, 1, 0, 0, 0, 812, 813, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 815, 3, 80, 34, 0, 815, 169, 1, 0, 0, 0, 816, 817, 3, 168, 78, 0, 817, 171, 1, 0, 0, 0, 818, 819, 3, 50, 19, 0, 819, 820, 1, 0, 0, 0, 
820, 821, 6, 80, 9, 0, 821, 173, 1, 0, 0, 0, 822, 823, 3, 52, 20, 0, 823, 824, 1, 0, 0, 0, 824, 825, 6, 81, 9, 0, 825, 175, 1, 0, 0, 0, 826, 827, 3, 54, 21, 0, 827, 828, 1, 0, 0, 0, 828, 829, 6, 82, 9, 0, 829, 177, 1, 0, 0, 0, 830, 831, 3, 66, 27, 0, 831, 832, 1, 0, 0, 0, 832, 833, 6, 83, 12, 0, 833, 834, 6, 83, 13, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 162, 75, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 10, 0, 838, 181, 1, 0, 0, 0, 839, 840, 3, 164, 76, 0, 840, 841, 1, 0, 0, 0, 841, 842, 6, 85, 14, 0, 842, 183, 1, 0, 0, 0, 843, 844, 3, 102, 45, 0, 844, 845, 1, 0, 0, 0, 845, 846, 6, 86, 15, 0, 846, 185, 1, 0, 0, 0, 847, 848, 3, 100, 44, 0, 848, 849, 1, 0, 0, 0, 849, 850, 6, 87, 16, 0, 850, 187, 1, 0, 0, 0, 851, 852, 3, 88, 38, 0, 852, 853, 1, 0, 0, 0, 853, 854, 6, 88, 17, 0, 854, 189, 1, 0, 0, 0, 855, 856, 5, 111, 0, 0, 856, 857, 5, 112, 0, 0, 857, 858, 5, 116, 0, 0, 858, 859, 5, 105, 0, 0, 859, 860, 5, 111, 0, 0, 860, 861, 5, 110, 0, 0, 861, 862, 5, 115, 0, 0, 862, 191, 1, 0, 0, 0, 863, 864, 5, 109, 0, 0, 864, 865, 5, 101, 0, 0, 865, 866, 5, 116, 0, 0, 866, 867, 5, 97, 0, 0, 867, 868, 5, 100, 0, 0, 868, 869, 5, 97, 0, 0, 869, 870, 5, 116, 0, 0, 870, 871, 5, 97, 0, 0, 871, 193, 1, 0, 0, 0, 872, 876, 8, 10, 0, 0, 873, 874, 5, 47, 0, 0, 874, 876, 8, 11, 0, 0, 875, 872, 1, 0, 0, 0, 875, 873, 1, 0, 0, 0, 876, 195, 1, 0, 0, 0, 877, 879, 3, 194, 91, 0, 878, 877, 1, 0, 0, 0, 879, 880, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 883, 3, 170, 79, 0, 883, 884, 1, 0, 0, 0, 884, 885, 6, 93, 18, 0, 885, 199, 1, 0, 0, 0, 886, 887, 3, 50, 19, 0, 887, 888, 1, 0, 0, 0, 888, 889, 6, 94, 9, 0, 889, 201, 1, 0, 0, 0, 890, 891, 3, 52, 20, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 95, 9, 0, 893, 203, 1, 0, 0, 0, 894, 895, 3, 54, 21, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 96, 9, 0, 897, 205, 1, 0, 0, 0, 898, 899, 3, 66, 27, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 97, 12, 0, 901, 902, 6, 97, 13, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 106, 47, 0, 904, 905, 1, 
0, 0, 0, 905, 906, 6, 98, 19, 0, 906, 209, 1, 0, 0, 0, 907, 908, 3, 102, 45, 0, 908, 909, 1, 0, 0, 0, 909, 910, 6, 99, 15, 0, 910, 211, 1, 0, 0, 0, 911, 916, 3, 70, 29, 0, 912, 916, 3, 68, 28, 0, 913, 916, 3, 84, 36, 0, 914, 916, 3, 156, 72, 0, 915, 911, 1, 0, 0, 0, 915, 912, 1, 0, 0, 0, 915, 913, 1, 0, 0, 0, 915, 914, 1, 0, 0, 0, 916, 213, 1, 0, 0, 0, 917, 920, 3, 70, 29, 0, 918, 920, 3, 156, 72, 0, 919, 917, 1, 0, 0, 0, 919, 918, 1, 0, 0, 0, 920, 924, 1, 0, 0, 0, 921, 923, 3, 212, 100, 0, 922, 921, 1, 0, 0, 0, 923, 926, 1, 0, 0, 0, 924, 922, 1, 0, 0, 0, 924, 925, 1, 0, 0, 0, 925, 937, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 927, 930, 3, 84, 36, 0, 928, 930, 3, 78, 33, 0, 929, 927, 1, 0, 0, 0, 929, 928, 1, 0, 0, 0, 930, 932, 1, 0, 0, 0, 931, 933, 3, 212, 100, 0, 932, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 932, 1, 0, 0, 0, 934, 935, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 919, 1, 0, 0, 0, 936, 929, 1, 0, 0, 0, 937, 215, 1, 0, 0, 0, 938, 941, 3, 214, 101, 0, 939, 941, 3, 168, 78, 0, 940, 938, 1, 0, 0, 0, 940, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 940, 1, 0, 0, 0, 942, 943, 1, 0, 0, 0, 943, 217, 1, 0, 0, 0, 944, 945, 3, 50, 19, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 103, 9, 0, 947, 219, 1, 0, 0, 0, 948, 949, 3, 52, 20, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 104, 9, 0, 951, 221, 1, 0, 0, 0, 952, 953, 3, 54, 21, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 105, 9, 0, 955, 223, 1, 0, 0, 0, 956, 957, 3, 66, 27, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 106, 12, 0, 959, 960, 6, 106, 13, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 100, 44, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 16, 0, 964, 227, 1, 0, 0, 0, 965, 966, 3, 102, 45, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 108, 15, 0, 968, 229, 1, 0, 0, 0, 969, 970, 3, 106, 47, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 109, 19, 0, 972, 231, 1, 0, 0, 0, 973, 974, 5, 97, 0, 0, 974, 975, 5, 115, 0, 0, 975, 233, 1, 0, 0, 0, 976, 977, 3, 216, 102, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 111, 20, 0, 979, 235, 1, 0, 0, 0, 980, 981, 3, 50, 19, 0, 981, 
982, 1, 0, 0, 0, 982, 983, 6, 112, 9, 0, 983, 237, 1, 0, 0, 0, 984, 985, 3, 52, 20, 0, 985, 986, 1, 0, 0, 0, 986, 987, 6, 113, 9, 0, 987, 239, 1, 0, 0, 0, 988, 989, 3, 54, 21, 0, 989, 990, 1, 0, 0, 0, 990, 991, 6, 114, 9, 0, 991, 241, 1, 0, 0, 0, 992, 993, 3, 66, 27, 0, 993, 994, 1, 0, 0, 0, 994, 995, 6, 115, 12, 0, 995, 996, 6, 115, 13, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 162, 75, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 10, 0, 1000, 1001, 6, 116, 21, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 5, 111, 0, 0, 1003, 1004, 5, 110, 0, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 117, 22, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 119, 0, 0, 1008, 1009, 5, 105, 0, 0, 1009, 1010, 5, 116, 0, 0, 1010, 1011, 5, 104, 0, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 118, 22, 0, 1013, 249, 1, 0, 0, 0, 1014, 1015, 8, 12, 0, 0, 1015, 251, 1, 0, 0, 0, 1016, 1018, 3, 250, 119, 0, 1017, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1017, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 3, 320, 154, 0, 1022, 1024, 1, 0, 0, 0, 1023, 1017, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1026, 1, 0, 0, 0, 1025, 1027, 3, 250, 119, 0, 1026, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1026, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 253, 1, 0, 0, 0, 1030, 1031, 3, 170, 79, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 121, 18, 0, 1033, 255, 1, 0, 0, 0, 1034, 1035, 3, 252, 120, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 122, 23, 0, 1037, 257, 1, 0, 0, 0, 1038, 1039, 3, 50, 19, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 123, 9, 0, 1041, 259, 1, 0, 0, 0, 1042, 1043, 3, 52, 20, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 124, 9, 0, 1045, 261, 1, 0, 0, 0, 1046, 1047, 3, 54, 21, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 125, 9, 0, 1049, 263, 1, 0, 0, 0, 1050, 1051, 3, 66, 27, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 126, 12, 0, 1053, 1054, 6, 126, 13, 0, 1054, 1055, 6, 126, 13, 0, 1055, 265, 1, 0, 0, 0, 1056, 1057, 3, 100, 44, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 127, 16, 0, 1059, 
267, 1, 0, 0, 0, 1060, 1061, 3, 102, 45, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 128, 15, 0, 1063, 269, 1, 0, 0, 0, 1064, 1065, 3, 106, 47, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 129, 19, 0, 1067, 271, 1, 0, 0, 0, 1068, 1069, 3, 248, 118, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 130, 24, 0, 1071, 273, 1, 0, 0, 0, 1072, 1073, 3, 216, 102, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 131, 20, 0, 1075, 275, 1, 0, 0, 0, 1076, 1077, 3, 170, 79, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 132, 18, 0, 1079, 277, 1, 0, 0, 0, 1080, 1081, 3, 50, 19, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 133, 9, 0, 1083, 279, 1, 0, 0, 0, 1084, 1085, 3, 52, 20, 0, 1085, 1086, 1, 0, 0, 0, 1086, 1087, 6, 134, 9, 0, 1087, 281, 1, 0, 0, 0, 1088, 1089, 3, 54, 21, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 135, 9, 0, 1091, 283, 1, 0, 0, 0, 1092, 1093, 3, 66, 27, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 136, 12, 0, 1095, 1096, 6, 136, 13, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 106, 47, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 19, 0, 1100, 287, 1, 0, 0, 0, 1101, 1102, 3, 170, 79, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 138, 18, 0, 1104, 289, 1, 0, 0, 0, 1105, 1106, 3, 166, 77, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 139, 25, 0, 1108, 291, 1, 0, 0, 0, 1109, 1110, 3, 50, 19, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 140, 9, 0, 1112, 293, 1, 0, 0, 0, 1113, 1114, 3, 52, 20, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 141, 9, 0, 1116, 295, 1, 0, 0, 0, 1117, 1118, 3, 54, 21, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 142, 9, 0, 1120, 297, 1, 0, 0, 0, 1121, 1122, 3, 66, 27, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 143, 12, 0, 1124, 1125, 6, 143, 13, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 5, 105, 0, 0, 1127, 1128, 5, 110, 0, 0, 1128, 1129, 5, 102, 0, 0, 1129, 1130, 5, 111, 0, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 3, 50, 19, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 145, 9, 0, 1134, 303, 1, 0, 0, 0, 1135, 1136, 3, 52, 20, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 146, 9, 0, 1138, 305, 1, 0, 0, 0, 
1139, 1140, 3, 54, 21, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 147, 9, 0, 1142, 307, 1, 0, 0, 0, 1143, 1144, 3, 66, 27, 0, 1144, 1145, 1, 0, 0, 0, 1145, 1146, 6, 148, 12, 0, 1146, 1147, 6, 148, 13, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 5, 102, 0, 0, 1149, 1150, 5, 117, 0, 0, 1150, 1151, 5, 110, 0, 0, 1151, 1152, 5, 99, 0, 0, 1152, 1153, 5, 116, 0, 0, 1153, 1154, 5, 105, 0, 0, 1154, 1155, 5, 111, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 115, 0, 0, 1157, 311, 1, 0, 0, 0, 1158, 1159, 3, 50, 19, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 150, 9, 0, 1161, 313, 1, 0, 0, 0, 1162, 1163, 3, 52, 20, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 151, 9, 0, 1165, 315, 1, 0, 0, 0, 1166, 1167, 3, 54, 21, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 152, 9, 0, 1169, 317, 1, 0, 0, 0, 1170, 1171, 3, 164, 76, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 153, 14, 0, 1173, 1174, 6, 153, 13, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 5, 58, 0, 0, 1176, 321, 1, 0, 0, 0, 1177, 1183, 3, 78, 33, 0, 1178, 1183, 3, 68, 28, 0, 1179, 1183, 3, 106, 47, 0, 1180, 1183, 3, 70, 29, 0, 1181, 1183, 3, 84, 36, 0, 1182, 1177, 1, 0, 0, 0, 1182, 1178, 1, 0, 0, 0, 1182, 1179, 1, 0, 0, 0, 1182, 1180, 1, 0, 0, 0, 1182, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1182, 1, 0, 0, 0, 1184, 1185, 1, 0, 0, 0, 1185, 323, 1, 0, 0, 0, 1186, 1187, 3, 50, 19, 0, 1187, 1188, 1, 0, 0, 0, 1188, 1189, 6, 156, 9, 0, 1189, 325, 1, 0, 0, 0, 1190, 1191, 3, 52, 20, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 157, 9, 0, 1193, 327, 1, 0, 0, 0, 1194, 1195, 3, 54, 21, 0, 1195, 1196, 1, 0, 0, 0, 1196, 1197, 6, 158, 9, 0, 1197, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 794, 799, 804, 806, 812, 875, 880, 915, 919, 924, 929, 934, 936, 940, 942, 1019, 1023, 1028, 1182, 1184, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 
4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 27, 0, 7, 67, 0, 7, 36, 0, 7, 77, 0, 5, 11, 0, 5, 7, 0, 7, 87, 0, 7, 86, 0, 7, 66, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 3107ec6259dbc..358bb431e468e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -21,23 +21,23 @@ public class EsqlBaseLexer extends Lexer { KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, - ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, - LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, - GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, + AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, + LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, + PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, + GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, - EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, - FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, - PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, - AS=80, 
RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, - ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, - ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, - ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, - MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, - META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108; + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, OPTIONS=71, METADATA=72, FROM_UNQUOTED_IDENTIFIER=73, + FROM_LINE_COMMENT=74, FROM_MULTILINE_COMMENT=75, FROM_WS=76, ID_PATTERN=77, + PROJECT_LINE_COMMENT=78, PROJECT_MULTILINE_COMMENT=79, PROJECT_WS=80, + AS=81, RENAME_LINE_COMMENT=82, RENAME_MULTILINE_COMMENT=83, RENAME_WS=84, + ON=85, WITH=86, ENRICH_POLICY_NAME=87, ENRICH_LINE_COMMENT=88, ENRICH_MULTILINE_COMMENT=89, + ENRICH_WS=90, ENRICH_FIELD_LINE_COMMENT=91, ENRICH_FIELD_MULTILINE_COMMENT=92, + ENRICH_FIELD_WS=93, MVEXPAND_LINE_COMMENT=94, MVEXPAND_MULTILINE_COMMENT=95, + MVEXPAND_WS=96, INFO=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, + SHOW_WS=100, FUNCTIONS=101, META_LINE_COMMENT=102, META_MULTILINE_COMMENT=103, + META_WS=104, COLON=105, SETTING=106, SETTING_LINE_COMMENT=107, SETTTING_MULTILINE_COMMENT=108, + SETTING_WS=109; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, @@ -60,14 +60,15 @@ private static String[] makeRuleNames() { "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "STRING", "INTEGER_LITERAL", 
"DECIMAL_LITERAL", - "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", - "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", - "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", + "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", + "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", + "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", + "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", + "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", + "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", + "FROM_QUTED_STRING", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", @@ -99,10 +100,10 @@ private static String[] makeLiteralNames() { "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, 
"'info'", null, null, - null, "'functions'", null, null, null, "':'" + null, null, null, null, null, "'options'", "'metadata'", null, null, + null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'with'", null, null, null, null, null, null, null, null, null, null, + "'info'", null, null, null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -112,13 +113,13 @@ private static String[] makeSymbolicNames() { "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", - "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", - "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", - "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", + "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", + "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", + "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", @@ -190,7 +191,7 @@ public EsqlBaseLexer(CharStream input) { 
public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000l\u049e\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000m\u04ae\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ @@ -232,707 +233,717 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ - "\u0002\u009c\u0007\u009c\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - 
"\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + 
"\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01de\b\u0012\u000b\u0012\f"+ - "\u0012\u01df\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01e8\b\u0013\n\u0013\f\u0013\u01eb\t\u0013\u0001"+ - "\u0013\u0003\u0013\u01ee\b\u0013\u0001\u0013\u0003\u0013\u01f1\b\u0013"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u01fa\b\u0014\n\u0014\f\u0014\u01fd\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u0205\b\u0015\u000b\u0015\f\u0015\u0206\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0230\b \u0001 \u0004 \u0233\b \u000b \f \u0234\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u023e\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u0245\b%\u0001&\u0001&\u0001&\u0005&\u024a\b&\n"+ - "&\f&\u024d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u0255\b"+ - "&\n&\f&\u0258\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u025f\b&\u0001"+ - "&\u0003&\u0262\b&\u0003&\u0264\b&\u0001\'\u0004\'\u0267\b\'\u000b\'\f"+ - "\'\u0268\u0001(\u0004(\u026c\b(\u000b(\f(\u026d\u0001(\u0001(\u0005(\u0272"+ - "\b(\n(\f(\u0275\t(\u0001(\u0001(\u0004(\u0279\b(\u000b(\f(\u027a\u0001"+ - "(\u0004(\u027e\b(\u000b(\f(\u027f\u0001(\u0001(\u0005(\u0284\b(\n(\f("+ - 
"\u0287\t(\u0003(\u0289\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u028f\b("+ - "\u000b(\f(\u0290\u0001(\u0001(\u0003(\u0295\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u0001"+ - "0\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u0001"+ - "1\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u00014\u0001"+ - "4\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ - "9\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001<\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001"+ - ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001"+ - "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001"+ - "L\u0001M\u0001M\u0005M\u0315\bM\nM\fM\u0318\tM\u0001M\u0001M\u0003M\u031c"+ - "\bM\u0001M\u0004M\u031f\bM\u000bM\fM\u0320\u0003M\u0323\bM\u0001N\u0001"+ - "N\u0004N\u0327\bN\u000bN\fN\u0328\u0001N\u0001N\u0001O\u0001O\u0001P\u0001"+ - "P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001X\u0001"+ - "X\u0001Y\u0001Y\u0001Y\u0003Y\u035c\bY\u0001Z\u0004Z\u035f\bZ\u000bZ\f"+ - "Z\u0360\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ - "]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001"+ - "a\u0001b\u0001b\u0001b\u0001b\u0003b\u0384\bb\u0001c\u0001c\u0003c\u0388"+ - 
"\bc\u0001c\u0005c\u038b\bc\nc\fc\u038e\tc\u0001c\u0001c\u0003c\u0392\b"+ - "c\u0001c\u0004c\u0395\bc\u000bc\fc\u0396\u0003c\u0399\bc\u0001d\u0001"+ - "d\u0004d\u039d\bd\u000bd\fd\u039e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001"+ - "f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ - "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ - "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ - "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ + "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ + "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ + "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ + "\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ + " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ + "#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ + "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ + 
"\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ + "&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ + "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ + "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ + "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ + "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ + "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ + "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ + "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u00010\u00010\u0001"+ + "0\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ + "2\u00012\u00013\u00013\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u00019\u00019\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001"+ + "C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001E\u0001F\u0001F\u0001G\u0001"+ + "G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001K\u0001"+ + "K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0005M\u0319"+ + "\bM\nM\fM\u031c\tM\u0001M\u0001M\u0003M\u0320\bM\u0001M\u0004M\u0323\b"+ + "M\u000bM\fM\u0324\u0003M\u0327\bM\u0001N\u0001N\u0004N\u032b\bN\u000b"+ + "N\fN\u032c\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ + "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001"+ + "S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001"+ + "U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001"+ + 
"X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "[\u0001[\u0001[\u0003[\u036c\b[\u0001\\\u0004\\\u036f\b\\\u000b\\\f\\"+ + "\u0370\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_"+ + "\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001"+ + "a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001"+ + "c\u0001d\u0001d\u0001d\u0001d\u0003d\u0394\bd\u0001e\u0001e\u0003e\u0398"+ + "\be\u0001e\u0005e\u039b\be\ne\fe\u039e\te\u0001e\u0001e\u0003e\u03a2\b"+ + "e\u0001e\u0004e\u03a5\be\u000be\fe\u03a6\u0003e\u03a9\be\u0001f\u0001"+ + "f\u0004f\u03ad\bf\u000bf\ff\u03ae\u0001g\u0001g\u0001g\u0001g\u0001h\u0001"+ + "h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001"+ + "j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001"+ + "m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001"+ "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0004v\u03ea\bv\u000b"+ - "v\fv\u03eb\u0001v\u0001v\u0003v\u03f0\bv\u0001v\u0004v\u03f3\bv\u000b"+ - "v\fv\u03f4\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ - "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ - "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001"+ - "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ - "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ + 
"t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001"+ + "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03fa\bx\u000b"+ + "x\fx\u03fb\u0001x\u0001x\u0003x\u0400\bx\u0001x\u0004x\u0403\bx\u000b"+ + "x\fx\u0404\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001"+ + "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083"+ + "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086"+ "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088"+ - "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ - "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ - "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ - "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f"+ + "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b"+ + "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e"+ + "\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f"+ "\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ - "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092"+ 
"\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ - "\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ - "\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0004\u0099\u048f\b\u0099\u000b\u0099\f\u0099\u0490\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ - "\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0002"+ - "\u01fb\u0256\u0000\u009d\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014"+ - "\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&"+ - "\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u00156\u00168\u0000"+ - ":\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000"+ - "N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001f"+ - "b d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a"+ - "4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e"+ - ">\u00a0?\u00a2@\u00a4A\u00a6B\u00a8\u0000\u00aaC\u00acD\u00aeE\u00b0F"+ - "\u00b2\u0000\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000\u00bcG\u00be"+ - "\u0000\u00c0H\u00c2\u0000\u00c4I\u00c6J\u00c8K\u00ca\u0000\u00cc\u0000"+ - "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4L\u00d6M\u00d8N\u00daO\u00dc"+ - "\u0000\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4P\u00e6\u0000\u00e8Q\u00ea"+ - "R\u00ecS\u00ee\u0000\u00f0\u0000\u00f2T\u00f4U\u00f6\u0000\u00f8V\u00fa"+ - "\u0000\u00fc\u0000\u00feW\u0100X\u0102Y\u0104\u0000\u0106\u0000\u0108"+ - "\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112Z\u0114[\u0116"+ - "\\\u0118\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120]\u0122^\u0124"+ - 
"_\u0126\u0000\u0128`\u012aa\u012cb\u012ec\u0130\u0000\u0132d\u0134e\u0136"+ - "f\u0138g\u013a\u0000\u013ch\u013ei\u0140j\u0142k\u0144l\f\u0000\u0001"+ - "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t\n\r\r"+ - " //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ - "\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ - "\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]`"+ - "`||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04b9\u0000"+ - "\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010"+ - "\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014"+ - "\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018"+ - "\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c"+ - "\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001"+ - "\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000"+ - "\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000"+ - "\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000."+ - "\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001\u0000"+ - "\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000"+ - "\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001<"+ - "\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001\u0000"+ - "\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000"+ - "\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002"+ - "^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001"+ - "\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000"+ - "\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002"+ - "l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001"+ - 
"\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000"+ - "\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002"+ - "z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001"+ - "\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082\u0001"+ - "\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086\u0001"+ - "\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a\u0001"+ - "\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e\u0001"+ - "\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092\u0001"+ - "\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096\u0001"+ - "\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a\u0001"+ - "\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e\u0001"+ - "\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2\u0001"+ - "\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6\u0001"+ - "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ - "\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ - "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ - "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8\u0001"+ - "\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc\u0001"+ - "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ - "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ - "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0004\u00ca\u0001"+ - "\u0000\u0000\u0000\u0004\u00cc\u0001\u0000\u0000\u0000\u0004\u00ce\u0001"+ - "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ - "\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00da\u0001"+ - "\u0000\u0000\u0000\u0005\u00dc\u0001\u0000\u0000\u0000\u0005\u00de\u0001"+ - 
"\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2\u0001"+ - "\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ - "\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ - "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0006\u00ee\u0001"+ - "\u0000\u0000\u0000\u0006\u00f0\u0001\u0000\u0000\u0000\u0006\u00f2\u0001"+ - "\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ - "\u0000\u0000\u0000\u0006\u00fa\u0001\u0000\u0000\u0000\u0006\u00fc\u0001"+ - "\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100\u0001"+ - "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ - "\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ - "\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ - "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ - "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ - "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000"+ - "\u0000\u0000\b\u011a\u0001\u0000\u0000\u0000\b\u011c\u0001\u0000\u0000"+ - "\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001\u0000\u0000\u0000"+ - "\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000\u0000\u0000\t\u0126"+ - "\u0001\u0000\u0000\u0000\t\u0128\u0001\u0000\u0000\u0000\t\u012a\u0001"+ - "\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000\t\u012e\u0001\u0000"+ - "\u0000\u0000\n\u0130\u0001\u0000\u0000\u0000\n\u0132\u0001\u0000\u0000"+ - "\u0000\n\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001\u0000\u0000\u0000"+ - "\n\u0138\u0001\u0000\u0000\u0000\u000b\u013a\u0001\u0000\u0000\u0000\u000b"+ - "\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001\u0000\u0000\u0000\u000b"+ - "\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000\u0000\u000b"+ - "\u0144\u0001\u0000\u0000\u0000\f\u0146\u0001\u0000\u0000\u0000\u000e\u0150"+ - 
"\u0001\u0000\u0000\u0000\u0010\u0157\u0001\u0000\u0000\u0000\u0012\u0160"+ - "\u0001\u0000\u0000\u0000\u0014\u0167\u0001\u0000\u0000\u0000\u0016\u0171"+ - "\u0001\u0000\u0000\u0000\u0018\u0178\u0001\u0000\u0000\u0000\u001a\u017f"+ - "\u0001\u0000\u0000\u0000\u001c\u018d\u0001\u0000\u0000\u0000\u001e\u0194"+ - "\u0001\u0000\u0000\u0000 \u019c\u0001\u0000\u0000\u0000\"\u01a3\u0001"+ - "\u0000\u0000\u0000$\u01af\u0001\u0000\u0000\u0000&\u01b8\u0001\u0000\u0000"+ - "\u0000(\u01be\u0001\u0000\u0000\u0000*\u01c5\u0001\u0000\u0000\u0000,"+ - "\u01cc\u0001\u0000\u0000\u0000.\u01d4\u0001\u0000\u0000\u00000\u01dd\u0001"+ - "\u0000\u0000\u00002\u01e3\u0001\u0000\u0000\u00004\u01f4\u0001\u0000\u0000"+ - "\u00006\u0204\u0001\u0000\u0000\u00008\u020a\u0001\u0000\u0000\u0000:"+ - "\u020f\u0001\u0000\u0000\u0000<\u0214\u0001\u0000\u0000\u0000>\u0218\u0001"+ - "\u0000\u0000\u0000@\u021c\u0001\u0000\u0000\u0000B\u0220\u0001\u0000\u0000"+ - "\u0000D\u0224\u0001\u0000\u0000\u0000F\u0226\u0001\u0000\u0000\u0000H"+ - "\u0228\u0001\u0000\u0000\u0000J\u022b\u0001\u0000\u0000\u0000L\u022d\u0001"+ - "\u0000\u0000\u0000N\u0236\u0001\u0000\u0000\u0000P\u0238\u0001\u0000\u0000"+ - "\u0000R\u023d\u0001\u0000\u0000\u0000T\u023f\u0001\u0000\u0000\u0000V"+ - "\u0244\u0001\u0000\u0000\u0000X\u0263\u0001\u0000\u0000\u0000Z\u0266\u0001"+ - "\u0000\u0000\u0000\\\u0294\u0001\u0000\u0000\u0000^\u0296\u0001\u0000"+ - "\u0000\u0000`\u0299\u0001\u0000\u0000\u0000b\u029d\u0001\u0000\u0000\u0000"+ - "d\u02a1\u0001\u0000\u0000\u0000f\u02a3\u0001\u0000\u0000\u0000h\u02a5"+ - "\u0001\u0000\u0000\u0000j\u02aa\u0001\u0000\u0000\u0000l\u02ac\u0001\u0000"+ - "\u0000\u0000n\u02b2\u0001\u0000\u0000\u0000p\u02b8\u0001\u0000\u0000\u0000"+ - "r\u02bd\u0001\u0000\u0000\u0000t\u02bf\u0001\u0000\u0000\u0000v\u02c2"+ - "\u0001\u0000\u0000\u0000x\u02c5\u0001\u0000\u0000\u0000z\u02ca\u0001\u0000"+ - "\u0000\u0000|\u02ce\u0001\u0000\u0000\u0000~\u02d3\u0001\u0000\u0000\u0000"+ - 
"\u0080\u02d9\u0001\u0000\u0000\u0000\u0082\u02dc\u0001\u0000\u0000\u0000"+ - "\u0084\u02de\u0001\u0000\u0000\u0000\u0086\u02e4\u0001\u0000\u0000\u0000"+ - "\u0088\u02e6\u0001\u0000\u0000\u0000\u008a\u02eb\u0001\u0000\u0000\u0000"+ - "\u008c\u02ee\u0001\u0000\u0000\u0000\u008e\u02f1\u0001\u0000\u0000\u0000"+ - "\u0090\u02f4\u0001\u0000\u0000\u0000\u0092\u02f6\u0001\u0000\u0000\u0000"+ - "\u0094\u02f9\u0001\u0000\u0000\u0000\u0096\u02fb\u0001\u0000\u0000\u0000"+ - "\u0098\u02fe\u0001\u0000\u0000\u0000\u009a\u0300\u0001\u0000\u0000\u0000"+ - "\u009c\u0302\u0001\u0000\u0000\u0000\u009e\u0304\u0001\u0000\u0000\u0000"+ - "\u00a0\u0306\u0001\u0000\u0000\u0000\u00a2\u0308\u0001\u0000\u0000\u0000"+ - "\u00a4\u030d\u0001\u0000\u0000\u0000\u00a6\u0322\u0001\u0000\u0000\u0000"+ - "\u00a8\u0324\u0001\u0000\u0000\u0000\u00aa\u032c\u0001\u0000\u0000\u0000"+ - "\u00ac\u032e\u0001\u0000\u0000\u0000\u00ae\u0332\u0001\u0000\u0000\u0000"+ - "\u00b0\u0336\u0001\u0000\u0000\u0000\u00b2\u033a\u0001\u0000\u0000\u0000"+ - "\u00b4\u033f\u0001\u0000\u0000\u0000\u00b6\u0343\u0001\u0000\u0000\u0000"+ - "\u00b8\u0347\u0001\u0000\u0000\u0000\u00ba\u034b\u0001\u0000\u0000\u0000"+ - "\u00bc\u034f\u0001\u0000\u0000\u0000\u00be\u035b\u0001\u0000\u0000\u0000"+ - "\u00c0\u035e\u0001\u0000\u0000\u0000\u00c2\u0362\u0001\u0000\u0000\u0000"+ - "\u00c4\u0366\u0001\u0000\u0000\u0000\u00c6\u036a\u0001\u0000\u0000\u0000"+ - "\u00c8\u036e\u0001\u0000\u0000\u0000\u00ca\u0372\u0001\u0000\u0000\u0000"+ - "\u00cc\u0377\u0001\u0000\u0000\u0000\u00ce\u037b\u0001\u0000\u0000\u0000"+ - "\u00d0\u0383\u0001\u0000\u0000\u0000\u00d2\u0398\u0001\u0000\u0000\u0000"+ - "\u00d4\u039c\u0001\u0000\u0000\u0000\u00d6\u03a0\u0001\u0000\u0000\u0000"+ - "\u00d8\u03a4\u0001\u0000\u0000\u0000\u00da\u03a8\u0001\u0000\u0000\u0000"+ - "\u00dc\u03ac\u0001\u0000\u0000\u0000\u00de\u03b1\u0001\u0000\u0000\u0000"+ - "\u00e0\u03b5\u0001\u0000\u0000\u0000\u00e2\u03b9\u0001\u0000\u0000\u0000"+ - 
"\u00e4\u03bd\u0001\u0000\u0000\u0000\u00e6\u03c0\u0001\u0000\u0000\u0000"+ - "\u00e8\u03c4\u0001\u0000\u0000\u0000\u00ea\u03c8\u0001\u0000\u0000\u0000"+ - "\u00ec\u03cc\u0001\u0000\u0000\u0000\u00ee\u03d0\u0001\u0000\u0000\u0000"+ - "\u00f0\u03d5\u0001\u0000\u0000\u0000\u00f2\u03da\u0001\u0000\u0000\u0000"+ - "\u00f4\u03df\u0001\u0000\u0000\u0000\u00f6\u03e6\u0001\u0000\u0000\u0000"+ - "\u00f8\u03ef\u0001\u0000\u0000\u0000\u00fa\u03f6\u0001\u0000\u0000\u0000"+ - "\u00fc\u03fa\u0001\u0000\u0000\u0000\u00fe\u03fe\u0001\u0000\u0000\u0000"+ - "\u0100\u0402\u0001\u0000\u0000\u0000\u0102\u0406\u0001\u0000\u0000\u0000"+ - "\u0104\u040a\u0001\u0000\u0000\u0000\u0106\u0410\u0001\u0000\u0000\u0000"+ - "\u0108\u0414\u0001\u0000\u0000\u0000\u010a\u0418\u0001\u0000\u0000\u0000"+ - "\u010c\u041c\u0001\u0000\u0000\u0000\u010e\u0420\u0001\u0000\u0000\u0000"+ - "\u0110\u0424\u0001\u0000\u0000\u0000\u0112\u0428\u0001\u0000\u0000\u0000"+ - "\u0114\u042c\u0001\u0000\u0000\u0000\u0116\u0430\u0001\u0000\u0000\u0000"+ - "\u0118\u0434\u0001\u0000\u0000\u0000\u011a\u0439\u0001\u0000\u0000\u0000"+ - "\u011c\u043d\u0001\u0000\u0000\u0000\u011e\u0441\u0001\u0000\u0000\u0000"+ - "\u0120\u0445\u0001\u0000\u0000\u0000\u0122\u0449\u0001\u0000\u0000\u0000"+ - "\u0124\u044d\u0001\u0000\u0000\u0000\u0126\u0451\u0001\u0000\u0000\u0000"+ - "\u0128\u0456\u0001\u0000\u0000\u0000\u012a\u045b\u0001\u0000\u0000\u0000"+ - "\u012c\u045f\u0001\u0000\u0000\u0000\u012e\u0463\u0001\u0000\u0000\u0000"+ - "\u0130\u0467\u0001\u0000\u0000\u0000\u0132\u046c\u0001\u0000\u0000\u0000"+ - "\u0134\u0476\u0001\u0000\u0000\u0000\u0136\u047a\u0001\u0000\u0000\u0000"+ - "\u0138\u047e\u0001\u0000\u0000\u0000\u013a\u0482\u0001\u0000\u0000\u0000"+ - "\u013c\u0487\u0001\u0000\u0000\u0000\u013e\u048e\u0001\u0000\u0000\u0000"+ - "\u0140\u0492\u0001\u0000\u0000\u0000\u0142\u0496\u0001\u0000\u0000\u0000"+ - "\u0144\u049a\u0001\u0000\u0000\u0000\u0146\u0147\u0005d\u0000\u0000\u0147"+ - 
"\u0148\u0005i\u0000\u0000\u0148\u0149\u0005s\u0000\u0000\u0149\u014a\u0005"+ - "s\u0000\u0000\u014a\u014b\u0005e\u0000\u0000\u014b\u014c\u0005c\u0000"+ - "\u0000\u014c\u014d\u0005t\u0000\u0000\u014d\u014e\u0001\u0000\u0000\u0000"+ - "\u014e\u014f\u0006\u0000\u0000\u0000\u014f\r\u0001\u0000\u0000\u0000\u0150"+ - "\u0151\u0005d\u0000\u0000\u0151\u0152\u0005r\u0000\u0000\u0152\u0153\u0005"+ - "o\u0000\u0000\u0153\u0154\u0005p\u0000\u0000\u0154\u0155\u0001\u0000\u0000"+ - "\u0000\u0155\u0156\u0006\u0001\u0001\u0000\u0156\u000f\u0001\u0000\u0000"+ - "\u0000\u0157\u0158\u0005e\u0000\u0000\u0158\u0159\u0005n\u0000\u0000\u0159"+ - "\u015a\u0005r\u0000\u0000\u015a\u015b\u0005i\u0000\u0000\u015b\u015c\u0005"+ - "c\u0000\u0000\u015c\u015d\u0005h\u0000\u0000\u015d\u015e\u0001\u0000\u0000"+ - "\u0000\u015e\u015f\u0006\u0002\u0002\u0000\u015f\u0011\u0001\u0000\u0000"+ - "\u0000\u0160\u0161\u0005e\u0000\u0000\u0161\u0162\u0005v\u0000\u0000\u0162"+ - "\u0163\u0005a\u0000\u0000\u0163\u0164\u0005l\u0000\u0000\u0164\u0165\u0001"+ - "\u0000\u0000\u0000\u0165\u0166\u0006\u0003\u0000\u0000\u0166\u0013\u0001"+ - "\u0000\u0000\u0000\u0167\u0168\u0005e\u0000\u0000\u0168\u0169\u0005x\u0000"+ - "\u0000\u0169\u016a\u0005p\u0000\u0000\u016a\u016b\u0005l\u0000\u0000\u016b"+ - "\u016c\u0005a\u0000\u0000\u016c\u016d\u0005i\u0000\u0000\u016d\u016e\u0005"+ - "n\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0170\u0006\u0004"+ - "\u0003\u0000\u0170\u0015\u0001\u0000\u0000\u0000\u0171\u0172\u0005f\u0000"+ - "\u0000\u0172\u0173\u0005r\u0000\u0000\u0173\u0174\u0005o\u0000\u0000\u0174"+ - "\u0175\u0005m\u0000\u0000\u0175\u0176\u0001\u0000\u0000\u0000\u0176\u0177"+ - "\u0006\u0005\u0004\u0000\u0177\u0017\u0001\u0000\u0000\u0000\u0178\u0179"+ - "\u0005g\u0000\u0000\u0179\u017a\u0005r\u0000\u0000\u017a\u017b\u0005o"+ - "\u0000\u0000\u017b\u017c\u0005k\u0000\u0000\u017c\u017d\u0001\u0000\u0000"+ - "\u0000\u017d\u017e\u0006\u0006\u0000\u0000\u017e\u0019\u0001\u0000\u0000"+ - 
"\u0000\u017f\u0180\u0005i\u0000\u0000\u0180\u0181\u0005n\u0000\u0000\u0181"+ - "\u0182\u0005l\u0000\u0000\u0182\u0183\u0005i\u0000\u0000\u0183\u0184\u0005"+ - "n\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005s\u0000"+ - "\u0000\u0186\u0187\u0005t\u0000\u0000\u0187\u0188\u0005a\u0000\u0000\u0188"+ - "\u0189\u0005t\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0001"+ - "\u0000\u0000\u0000\u018b\u018c\u0006\u0007\u0000\u0000\u018c\u001b\u0001"+ - "\u0000\u0000\u0000\u018d\u018e\u0005k\u0000\u0000\u018e\u018f\u0005e\u0000"+ - "\u0000\u018f\u0190\u0005e\u0000\u0000\u0190\u0191\u0005p\u0000\u0000\u0191"+ - "\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\b\u0001\u0000\u0193\u001d"+ - "\u0001\u0000\u0000\u0000\u0194\u0195\u0005l\u0000\u0000\u0195\u0196\u0005"+ - "i\u0000\u0000\u0196\u0197\u0005m\u0000\u0000\u0197\u0198\u0005i\u0000"+ - "\u0000\u0198\u0199\u0005t\u0000\u0000\u0199\u019a\u0001\u0000\u0000\u0000"+ - "\u019a\u019b\u0006\t\u0000\u0000\u019b\u001f\u0001\u0000\u0000\u0000\u019c"+ - "\u019d\u0005m\u0000\u0000\u019d\u019e\u0005e\u0000\u0000\u019e\u019f\u0005"+ - "t\u0000\u0000\u019f\u01a0\u0005a\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ - "\u0000\u01a1\u01a2\u0006\n\u0005\u0000\u01a2!\u0001\u0000\u0000\u0000"+ - "\u01a3\u01a4\u0005m\u0000\u0000\u01a4\u01a5\u0005v\u0000\u0000\u01a5\u01a6"+ - "\u0005_\u0000\u0000\u01a6\u01a7\u0005e\u0000\u0000\u01a7\u01a8\u0005x"+ - "\u0000\u0000\u01a8\u01a9\u0005p\u0000\u0000\u01a9\u01aa\u0005a\u0000\u0000"+ - "\u01aa\u01ab\u0005n\u0000\u0000\u01ab\u01ac\u0005d\u0000\u0000\u01ac\u01ad"+ - "\u0001\u0000\u0000\u0000\u01ad\u01ae\u0006\u000b\u0006\u0000\u01ae#\u0001"+ - "\u0000\u0000\u0000\u01af\u01b0\u0005r\u0000\u0000\u01b0\u01b1\u0005e\u0000"+ - "\u0000\u01b1\u01b2\u0005n\u0000\u0000\u01b2\u01b3\u0005a\u0000\u0000\u01b3"+ - "\u01b4\u0005m\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6\u0001"+ - "\u0000\u0000\u0000\u01b6\u01b7\u0006\f\u0007\u0000\u01b7%\u0001\u0000"+ - 
"\u0000\u0000\u01b8\u01b9\u0005r\u0000\u0000\u01b9\u01ba\u0005o\u0000\u0000"+ - "\u01ba\u01bb\u0005w\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc"+ - "\u01bd\u0006\r\u0000\u0000\u01bd\'\u0001\u0000\u0000\u0000\u01be\u01bf"+ - "\u0005s\u0000\u0000\u01bf\u01c0\u0005h\u0000\u0000\u01c0\u01c1\u0005o"+ - "\u0000\u0000\u01c1\u01c2\u0005w\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000"+ - "\u0000\u01c3\u01c4\u0006\u000e\b\u0000\u01c4)\u0001\u0000\u0000\u0000"+ - "\u01c5\u01c6\u0005s\u0000\u0000\u01c6\u01c7\u0005o\u0000\u0000\u01c7\u01c8"+ - "\u0005r\u0000\u0000\u01c8\u01c9\u0005t\u0000\u0000\u01c9\u01ca\u0001\u0000"+ - "\u0000\u0000\u01ca\u01cb\u0006\u000f\u0000\u0000\u01cb+\u0001\u0000\u0000"+ - "\u0000\u01cc\u01cd\u0005s\u0000\u0000\u01cd\u01ce\u0005t\u0000\u0000\u01ce"+ - "\u01cf\u0005a\u0000\u0000\u01cf\u01d0\u0005t\u0000\u0000\u01d0\u01d1\u0005"+ - "s\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0010"+ - "\u0000\u0000\u01d3-\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005w\u0000\u0000"+ - "\u01d5\u01d6\u0005h\u0000\u0000\u01d6\u01d7\u0005e\u0000\u0000\u01d7\u01d8"+ - "\u0005r\u0000\u0000\u01d8\u01d9\u0005e\u0000\u0000\u01d9\u01da\u0001\u0000"+ - "\u0000\u0000\u01da\u01db\u0006\u0011\u0000\u0000\u01db/\u0001\u0000\u0000"+ - "\u0000\u01dc\u01de\b\u0000\u0000\u0000\u01dd\u01dc\u0001\u0000\u0000\u0000"+ - "\u01de\u01df\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000"+ - "\u01df\u01e0\u0001\u0000\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000"+ - "\u01e1\u01e2\u0006\u0012\u0000\u0000\u01e21\u0001\u0000\u0000\u0000\u01e3"+ - "\u01e4\u0005/\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000\u01e5\u01e9\u0001"+ - "\u0000\u0000\u0000\u01e6\u01e8\b\u0001\u0000\u0000\u01e7\u01e6\u0001\u0000"+ - "\u0000\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000"+ - "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ed\u0001\u0000"+ - "\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ee\u0005\r\u0000"+ - 
"\u0000\u01ed\u01ec\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000"+ - "\u0000\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01f1\u0005\n\u0000\u0000"+ - "\u01f0\u01ef\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000"+ - "\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3\u0006\u0013\t\u0000\u01f3"+ - "3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005/\u0000\u0000\u01f5\u01f6\u0005"+ - "*\u0000\u0000\u01f6\u01fb\u0001\u0000\u0000\u0000\u01f7\u01fa\u00034\u0014"+ - "\u0000\u01f8\u01fa\t\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000"+ - "\u01f9\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000"+ - "\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000"+ - "\u01fc\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ - "\u01fe\u01ff\u0005*\u0000\u0000\u01ff\u0200\u0005/\u0000\u0000\u0200\u0201"+ - "\u0001\u0000\u0000\u0000\u0201\u0202\u0006\u0014\t\u0000\u02025\u0001"+ - "\u0000\u0000\u0000\u0203\u0205\u0007\u0002\u0000\u0000\u0204\u0203\u0001"+ - "\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0204\u0001"+ - "\u0000\u0000\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0001"+ - "\u0000\u0000\u0000\u0208\u0209\u0006\u0015\t\u0000\u02097\u0001\u0000"+ - "\u0000\u0000\u020a\u020b\u0003\u00a2K\u0000\u020b\u020c\u0001\u0000\u0000"+ - "\u0000\u020c\u020d\u0006\u0016\n\u0000\u020d\u020e\u0006\u0016\u000b\u0000"+ - "\u020e9\u0001\u0000\u0000\u0000\u020f\u0210\u0003B\u001b\u0000\u0210\u0211"+ - "\u0001\u0000\u0000\u0000\u0211\u0212\u0006\u0017\f\u0000\u0212\u0213\u0006"+ - "\u0017\r\u0000\u0213;\u0001\u0000\u0000\u0000\u0214\u0215\u00036\u0015"+ - "\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216\u0217\u0006\u0018\t\u0000"+ - "\u0217=\u0001\u0000\u0000\u0000\u0218\u0219\u00032\u0013\u0000\u0219\u021a"+ - "\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0019\t\u0000\u021b?\u0001"+ - "\u0000\u0000\u0000\u021c\u021d\u00034\u0014\u0000\u021d\u021e\u0001\u0000"+ - 
"\u0000\u0000\u021e\u021f\u0006\u001a\t\u0000\u021fA\u0001\u0000\u0000"+ - "\u0000\u0220\u0221\u0005|\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000"+ - "\u0222\u0223\u0006\u001b\r\u0000\u0223C\u0001\u0000\u0000\u0000\u0224"+ - "\u0225\u0007\u0003\u0000\u0000\u0225E\u0001\u0000\u0000\u0000\u0226\u0227"+ - "\u0007\u0004\u0000\u0000\u0227G\u0001\u0000\u0000\u0000\u0228\u0229\u0005"+ - "\\\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000\u022aI\u0001\u0000"+ - "\u0000\u0000\u022b\u022c\b\u0006\u0000\u0000\u022cK\u0001\u0000\u0000"+ - "\u0000\u022d\u022f\u0007\u0007\u0000\u0000\u022e\u0230\u0007\b\u0000\u0000"+ - "\u022f\u022e\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000\u0000\u0000"+ - "\u0230\u0232\u0001\u0000\u0000\u0000\u0231\u0233\u0003D\u001c\u0000\u0232"+ - "\u0231\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000\u0234"+ - "\u0232\u0001\u0000\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235"+ - "M\u0001\u0000\u0000\u0000\u0236\u0237\u0005@\u0000\u0000\u0237O\u0001"+ - "\u0000\u0000\u0000\u0238\u0239\u0005`\u0000\u0000\u0239Q\u0001\u0000\u0000"+ - "\u0000\u023a\u023e\b\t\u0000\u0000\u023b\u023c\u0005`\u0000\u0000\u023c"+ - "\u023e\u0005`\u0000\u0000\u023d\u023a\u0001\u0000\u0000\u0000\u023d\u023b"+ - "\u0001\u0000\u0000\u0000\u023eS\u0001\u0000\u0000\u0000\u023f\u0240\u0005"+ - "_\u0000\u0000\u0240U\u0001\u0000\u0000\u0000\u0241\u0245\u0003F\u001d"+ - "\u0000\u0242\u0245\u0003D\u001c\u0000\u0243\u0245\u0003T$\u0000\u0244"+ - "\u0241\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244"+ - "\u0243\u0001\u0000\u0000\u0000\u0245W\u0001\u0000\u0000\u0000\u0246\u024b"+ - "\u0005\"\u0000\u0000\u0247\u024a\u0003H\u001e\u0000\u0248\u024a\u0003"+ - "J\u001f\u0000\u0249\u0247\u0001\u0000\u0000\u0000\u0249\u0248\u0001\u0000"+ - "\u0000\u0000\u024a\u024d\u0001\u0000\u0000\u0000\u024b\u0249\u0001\u0000"+ - "\u0000\u0000\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024e\u0001\u0000"+ - 
"\u0000\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024e\u0264\u0005\"\u0000"+ - "\u0000\u024f\u0250\u0005\"\u0000\u0000\u0250\u0251\u0005\"\u0000\u0000"+ - "\u0251\u0252\u0005\"\u0000\u0000\u0252\u0256\u0001\u0000\u0000\u0000\u0253"+ - "\u0255\b\u0001\u0000\u0000\u0254\u0253\u0001\u0000\u0000\u0000\u0255\u0258"+ - "\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000\u0000\u0256\u0254"+ - "\u0001\u0000\u0000\u0000\u0257\u0259\u0001\u0000\u0000\u0000\u0258\u0256"+ - "\u0001\u0000\u0000\u0000\u0259\u025a\u0005\"\u0000\u0000\u025a\u025b\u0005"+ - "\"\u0000\u0000\u025b\u025c\u0005\"\u0000\u0000\u025c\u025e\u0001\u0000"+ - "\u0000\u0000\u025d\u025f\u0005\"\u0000\u0000\u025e\u025d\u0001\u0000\u0000"+ - "\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0261\u0001\u0000\u0000"+ - "\u0000\u0260\u0262\u0005\"\u0000\u0000\u0261\u0260\u0001\u0000\u0000\u0000"+ - "\u0261\u0262\u0001\u0000\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000"+ - "\u0263\u0246\u0001\u0000\u0000\u0000\u0263\u024f\u0001\u0000\u0000\u0000"+ - "\u0264Y\u0001\u0000\u0000\u0000\u0265\u0267\u0003D\u001c\u0000\u0266\u0265"+ - "\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u0266"+ - "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269[\u0001"+ - "\u0000\u0000\u0000\u026a\u026c\u0003D\u001c\u0000\u026b\u026a\u0001\u0000"+ - "\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026b\u0001\u0000"+ - "\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000"+ - "\u0000\u0000\u026f\u0273\u0003j/\u0000\u0270\u0272\u0003D\u001c\u0000"+ - "\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001\u0000\u0000\u0000"+ - "\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ - "\u0274\u0295\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000"+ - "\u0276\u0278\u0003j/\u0000\u0277\u0279\u0003D\u001c\u0000\u0278\u0277"+ - "\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000\u0000\u027a\u0278"+ - 
"\u0001\u0000\u0000\u0000\u027a\u027b\u0001\u0000\u0000\u0000\u027b\u0295"+ - "\u0001\u0000\u0000\u0000\u027c\u027e\u0003D\u001c\u0000\u027d\u027c\u0001"+ - "\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u027d\u0001"+ - "\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280\u0288\u0001"+ - "\u0000\u0000\u0000\u0281\u0285\u0003j/\u0000\u0282\u0284\u0003D\u001c"+ - "\u0000\u0283\u0282\u0001\u0000\u0000\u0000\u0284\u0287\u0001\u0000\u0000"+ - "\u0000\u0285\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000"+ - "\u0000\u0286\u0289\u0001\u0000\u0000\u0000\u0287\u0285\u0001\u0000\u0000"+ - "\u0000\u0288\u0281\u0001\u0000\u0000\u0000\u0288\u0289\u0001\u0000\u0000"+ - "\u0000\u0289\u028a\u0001\u0000\u0000\u0000\u028a\u028b\u0003L \u0000\u028b"+ - "\u0295\u0001\u0000\u0000\u0000\u028c\u028e\u0003j/\u0000\u028d\u028f\u0003"+ - "D\u001c\u0000\u028e\u028d\u0001\u0000\u0000\u0000\u028f\u0290\u0001\u0000"+ - "\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000"+ - "\u0000\u0000\u0291\u0292\u0001\u0000\u0000\u0000\u0292\u0293\u0003L \u0000"+ - "\u0293\u0295\u0001\u0000\u0000\u0000\u0294\u026b\u0001\u0000\u0000\u0000"+ - "\u0294\u0276\u0001\u0000\u0000\u0000\u0294\u027d\u0001\u0000\u0000\u0000"+ - "\u0294\u028c\u0001\u0000\u0000\u0000\u0295]\u0001\u0000\u0000\u0000\u0296"+ - "\u0297\u0005b\u0000\u0000\u0297\u0298\u0005y\u0000\u0000\u0298_\u0001"+ - "\u0000\u0000\u0000\u0299\u029a\u0005a\u0000\u0000\u029a\u029b\u0005n\u0000"+ - "\u0000\u029b\u029c\u0005d\u0000\u0000\u029ca\u0001\u0000\u0000\u0000\u029d"+ - "\u029e\u0005a\u0000\u0000\u029e\u029f\u0005s\u0000\u0000\u029f\u02a0\u0005"+ - "c\u0000\u0000\u02a0c\u0001\u0000\u0000\u0000\u02a1\u02a2\u0005=\u0000"+ - "\u0000\u02a2e\u0001\u0000\u0000\u0000\u02a3\u02a4\u0005,\u0000\u0000\u02a4"+ - "g\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005d\u0000\u0000\u02a6\u02a7\u0005"+ - "e\u0000\u0000\u02a7\u02a8\u0005s\u0000\u0000\u02a8\u02a9\u0005c\u0000"+ - 
"\u0000\u02a9i\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005.\u0000\u0000\u02ab"+ - "k\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005f\u0000\u0000\u02ad\u02ae\u0005"+ - "a\u0000\u0000\u02ae\u02af\u0005l\u0000\u0000\u02af\u02b0\u0005s\u0000"+ - "\u0000\u02b0\u02b1\u0005e\u0000\u0000\u02b1m\u0001\u0000\u0000\u0000\u02b2"+ - "\u02b3\u0005f\u0000\u0000\u02b3\u02b4\u0005i\u0000\u0000\u02b4\u02b5\u0005"+ - "r\u0000\u0000\u02b5\u02b6\u0005s\u0000\u0000\u02b6\u02b7\u0005t\u0000"+ - "\u0000\u02b7o\u0001\u0000\u0000\u0000\u02b8\u02b9\u0005l\u0000\u0000\u02b9"+ - "\u02ba\u0005a\u0000\u0000\u02ba\u02bb\u0005s\u0000\u0000\u02bb\u02bc\u0005"+ - "t\u0000\u0000\u02bcq\u0001\u0000\u0000\u0000\u02bd\u02be\u0005(\u0000"+ - "\u0000\u02bes\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005i\u0000\u0000\u02c0"+ - "\u02c1\u0005n\u0000\u0000\u02c1u\u0001\u0000\u0000\u0000\u02c2\u02c3\u0005"+ - "i\u0000\u0000\u02c3\u02c4\u0005s\u0000\u0000\u02c4w\u0001\u0000\u0000"+ - "\u0000\u02c5\u02c6\u0005l\u0000\u0000\u02c6\u02c7\u0005i\u0000\u0000\u02c7"+ - "\u02c8\u0005k\u0000\u0000\u02c8\u02c9\u0005e\u0000\u0000\u02c9y\u0001"+ - "\u0000\u0000\u0000\u02ca\u02cb\u0005n\u0000\u0000\u02cb\u02cc\u0005o\u0000"+ - "\u0000\u02cc\u02cd\u0005t\u0000\u0000\u02cd{\u0001\u0000\u0000\u0000\u02ce"+ - "\u02cf\u0005n\u0000\u0000\u02cf\u02d0\u0005u\u0000\u0000\u02d0\u02d1\u0005"+ - "l\u0000\u0000\u02d1\u02d2\u0005l\u0000\u0000\u02d2}\u0001\u0000\u0000"+ - "\u0000\u02d3\u02d4\u0005n\u0000\u0000\u02d4\u02d5\u0005u\u0000\u0000\u02d5"+ - "\u02d6\u0005l\u0000\u0000\u02d6\u02d7\u0005l\u0000\u0000\u02d7\u02d8\u0005"+ - "s\u0000\u0000\u02d8\u007f\u0001\u0000\u0000\u0000\u02d9\u02da\u0005o\u0000"+ - "\u0000\u02da\u02db\u0005r\u0000\u0000\u02db\u0081\u0001\u0000\u0000\u0000"+ - "\u02dc\u02dd\u0005?\u0000\u0000\u02dd\u0083\u0001\u0000\u0000\u0000\u02de"+ - "\u02df\u0005r\u0000\u0000\u02df\u02e0\u0005l\u0000\u0000\u02e0\u02e1\u0005"+ - "i\u0000\u0000\u02e1\u02e2\u0005k\u0000\u0000\u02e2\u02e3\u0005e\u0000"+ - 
"\u0000\u02e3\u0085\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005)\u0000\u0000"+ - "\u02e5\u0087\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005t\u0000\u0000\u02e7"+ - "\u02e8\u0005r\u0000\u0000\u02e8\u02e9\u0005u\u0000\u0000\u02e9\u02ea\u0005"+ - "e\u0000\u0000\u02ea\u0089\u0001\u0000\u0000\u0000\u02eb\u02ec\u0005=\u0000"+ - "\u0000\u02ec\u02ed\u0005=\u0000\u0000\u02ed\u008b\u0001\u0000\u0000\u0000"+ - "\u02ee\u02ef\u0005=\u0000\u0000\u02ef\u02f0\u0005~\u0000\u0000\u02f0\u008d"+ - "\u0001\u0000\u0000\u0000\u02f1\u02f2\u0005!\u0000\u0000\u02f2\u02f3\u0005"+ - "=\u0000\u0000\u02f3\u008f\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005<\u0000"+ - "\u0000\u02f5\u0091\u0001\u0000\u0000\u0000\u02f6\u02f7\u0005<\u0000\u0000"+ - "\u02f7\u02f8\u0005=\u0000\u0000\u02f8\u0093\u0001\u0000\u0000\u0000\u02f9"+ - "\u02fa\u0005>\u0000\u0000\u02fa\u0095\u0001\u0000\u0000\u0000\u02fb\u02fc"+ - "\u0005>\u0000\u0000\u02fc\u02fd\u0005=\u0000\u0000\u02fd\u0097\u0001\u0000"+ - "\u0000\u0000\u02fe\u02ff\u0005+\u0000\u0000\u02ff\u0099\u0001\u0000\u0000"+ - "\u0000\u0300\u0301\u0005-\u0000\u0000\u0301\u009b\u0001\u0000\u0000\u0000"+ - "\u0302\u0303\u0005*\u0000\u0000\u0303\u009d\u0001\u0000\u0000\u0000\u0304"+ - "\u0305\u0005/\u0000\u0000\u0305\u009f\u0001\u0000\u0000\u0000\u0306\u0307"+ - "\u0005%\u0000\u0000\u0307\u00a1\u0001\u0000\u0000\u0000\u0308\u0309\u0005"+ - "[\u0000\u0000\u0309\u030a\u0001\u0000\u0000\u0000\u030a\u030b\u0006K\u0000"+ - "\u0000\u030b\u030c\u0006K\u0000\u0000\u030c\u00a3\u0001\u0000\u0000\u0000"+ - "\u030d\u030e\u0005]\u0000\u0000\u030e\u030f\u0001\u0000\u0000\u0000\u030f"+ - "\u0310\u0006L\r\u0000\u0310\u0311\u0006L\r\u0000\u0311\u00a5\u0001\u0000"+ - "\u0000\u0000\u0312\u0316\u0003F\u001d\u0000\u0313\u0315\u0003V%\u0000"+ - "\u0314\u0313\u0001\u0000\u0000\u0000\u0315\u0318\u0001\u0000\u0000\u0000"+ - "\u0316\u0314\u0001\u0000\u0000\u0000\u0316\u0317\u0001\u0000\u0000\u0000"+ - "\u0317\u0323\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000"+ - 
"\u0319\u031c\u0003T$\u0000\u031a\u031c\u0003N!\u0000\u031b\u0319\u0001"+ - "\u0000\u0000\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031e\u0001"+ - "\u0000\u0000\u0000\u031d\u031f\u0003V%\u0000\u031e\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u031e\u0001\u0000\u0000"+ - "\u0000\u0320\u0321\u0001\u0000\u0000\u0000\u0321\u0323\u0001\u0000\u0000"+ - "\u0000\u0322\u0312\u0001\u0000\u0000\u0000\u0322\u031b\u0001\u0000\u0000"+ - "\u0000\u0323\u00a7\u0001\u0000\u0000\u0000\u0324\u0326\u0003P\"\u0000"+ - "\u0325\u0327\u0003R#\u0000\u0326\u0325\u0001\u0000\u0000\u0000\u0327\u0328"+ - "\u0001\u0000\u0000\u0000\u0328\u0326\u0001\u0000\u0000\u0000\u0328\u0329"+ - "\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000\u0000\u032a\u032b"+ - "\u0003P\"\u0000\u032b\u00a9\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ - "\u00a8N\u0000\u032d\u00ab\u0001\u0000\u0000\u0000\u032e\u032f\u00032\u0013"+ - "\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0006P\t\u0000"+ - "\u0331\u00ad\u0001\u0000\u0000\u0000\u0332\u0333\u00034\u0014\u0000\u0333"+ - "\u0334\u0001\u0000\u0000\u0000\u0334\u0335\u0006Q\t\u0000\u0335\u00af"+ - "\u0001\u0000\u0000\u0000\u0336\u0337\u00036\u0015\u0000\u0337\u0338\u0001"+ - "\u0000\u0000\u0000\u0338\u0339\u0006R\t\u0000\u0339\u00b1\u0001\u0000"+ - "\u0000\u0000\u033a\u033b\u0003B\u001b\u0000\u033b\u033c\u0001\u0000\u0000"+ - "\u0000\u033c\u033d\u0006S\f\u0000\u033d\u033e\u0006S\r\u0000\u033e\u00b3"+ - "\u0001\u0000\u0000\u0000\u033f\u0340\u0003\u00a2K\u0000\u0340\u0341\u0001"+ - "\u0000\u0000\u0000\u0341\u0342\u0006T\n\u0000\u0342\u00b5\u0001\u0000"+ - "\u0000\u0000\u0343\u0344\u0003\u00a4L\u0000\u0344\u0345\u0001\u0000\u0000"+ - "\u0000\u0345\u0346\u0006U\u000e\u0000\u0346\u00b7\u0001\u0000\u0000\u0000"+ - "\u0347\u0348\u0003f-\u0000\u0348\u0349\u0001\u0000\u0000\u0000\u0349\u034a"+ - "\u0006V\u000f\u0000\u034a\u00b9\u0001\u0000\u0000\u0000\u034b\u034c\u0003"+ - 
"d,\u0000\u034c\u034d\u0001\u0000\u0000\u0000\u034d\u034e\u0006W\u0010"+ - "\u0000\u034e\u00bb\u0001\u0000\u0000\u0000\u034f\u0350\u0005m\u0000\u0000"+ - "\u0350\u0351\u0005e\u0000\u0000\u0351\u0352\u0005t\u0000\u0000\u0352\u0353"+ - "\u0005a\u0000\u0000\u0353\u0354\u0005d\u0000\u0000\u0354\u0355\u0005a"+ - "\u0000\u0000\u0355\u0356\u0005t\u0000\u0000\u0356\u0357\u0005a\u0000\u0000"+ - "\u0357\u00bd\u0001\u0000\u0000\u0000\u0358\u035c\b\n\u0000\u0000\u0359"+ - "\u035a\u0005/\u0000\u0000\u035a\u035c\b\u000b\u0000\u0000\u035b\u0358"+ - "\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000\u0000\u035c\u00bf"+ - "\u0001\u0000\u0000\u0000\u035d\u035f\u0003\u00beY\u0000\u035e\u035d\u0001"+ - "\u0000\u0000\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u035e\u0001"+ - "\u0000\u0000\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u00c1\u0001"+ - "\u0000\u0000\u0000\u0362\u0363\u0003\u00aaO\u0000\u0363\u0364\u0001\u0000"+ - "\u0000\u0000\u0364\u0365\u0006[\u0011\u0000\u0365\u00c3\u0001\u0000\u0000"+ - "\u0000\u0366\u0367\u00032\u0013\u0000\u0367\u0368\u0001\u0000\u0000\u0000"+ - "\u0368\u0369\u0006\\\t\u0000\u0369\u00c5\u0001\u0000\u0000\u0000\u036a"+ - "\u036b\u00034\u0014\u0000\u036b\u036c\u0001\u0000\u0000\u0000\u036c\u036d"+ - "\u0006]\t\u0000\u036d\u00c7\u0001\u0000\u0000\u0000\u036e\u036f\u0003"+ - "6\u0015\u0000\u036f\u0370\u0001\u0000\u0000\u0000\u0370\u0371\u0006^\t"+ - "\u0000\u0371\u00c9\u0001\u0000\u0000\u0000\u0372\u0373\u0003B\u001b\u0000"+ - "\u0373\u0374\u0001\u0000\u0000\u0000\u0374\u0375\u0006_\f\u0000\u0375"+ - "\u0376\u0006_\r\u0000\u0376\u00cb\u0001\u0000\u0000\u0000\u0377\u0378"+ - "\u0003j/\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a\u0006`"+ - "\u0012\u0000\u037a\u00cd\u0001\u0000\u0000\u0000\u037b\u037c\u0003f-\u0000"+ - "\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006a\u000f\u0000\u037e"+ - "\u00cf\u0001\u0000\u0000\u0000\u037f\u0384\u0003F\u001d\u0000\u0380\u0384"+ - 
"\u0003D\u001c\u0000\u0381\u0384\u0003T$\u0000\u0382\u0384\u0003\u009c"+ - "H\u0000\u0383\u037f\u0001\u0000\u0000\u0000\u0383\u0380\u0001\u0000\u0000"+ - "\u0000\u0383\u0381\u0001\u0000\u0000\u0000\u0383\u0382\u0001\u0000\u0000"+ - "\u0000\u0384\u00d1\u0001\u0000\u0000\u0000\u0385\u0388\u0003F\u001d\u0000"+ - "\u0386\u0388\u0003\u009cH\u0000\u0387\u0385\u0001\u0000\u0000\u0000\u0387"+ - "\u0386\u0001\u0000\u0000\u0000\u0388\u038c\u0001\u0000\u0000\u0000\u0389"+ - "\u038b\u0003\u00d0b\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038e"+ - "\u0001\u0000\u0000\u0000\u038c\u038a\u0001\u0000\u0000\u0000\u038c\u038d"+ - "\u0001\u0000\u0000\u0000\u038d\u0399\u0001\u0000\u0000\u0000\u038e\u038c"+ - "\u0001\u0000\u0000\u0000\u038f\u0392\u0003T$\u0000\u0390\u0392\u0003N"+ - "!\u0000\u0391\u038f\u0001\u0000\u0000\u0000\u0391\u0390\u0001\u0000\u0000"+ - "\u0000\u0392\u0394\u0001\u0000\u0000\u0000\u0393\u0395\u0003\u00d0b\u0000"+ - "\u0394\u0393\u0001\u0000\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000"+ - "\u0396\u0394\u0001\u0000\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000"+ - "\u0397\u0399\u0001\u0000\u0000\u0000\u0398\u0387\u0001\u0000\u0000\u0000"+ - "\u0398\u0391\u0001\u0000\u0000\u0000\u0399\u00d3\u0001\u0000\u0000\u0000"+ - "\u039a\u039d\u0003\u00d2c\u0000\u039b\u039d\u0003\u00a8N\u0000\u039c\u039a"+ - "\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d\u039e"+ - "\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000\u0000\u0000\u039e\u039f"+ - "\u0001\u0000\u0000\u0000\u039f\u00d5\u0001\u0000\u0000\u0000\u03a0\u03a1"+ - "\u00032\u0013\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03a3\u0006"+ - "e\t\u0000\u03a3\u00d7\u0001\u0000\u0000\u0000\u03a4\u03a5\u00034\u0014"+ - "\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006f\t\u0000"+ - "\u03a7\u00d9\u0001\u0000\u0000\u0000\u03a8\u03a9\u00036\u0015\u0000\u03a9"+ - "\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006g\t\u0000\u03ab\u00db"+ - 
"\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003B\u001b\u0000\u03ad\u03ae\u0001"+ - "\u0000\u0000\u0000\u03ae\u03af\u0006h\f\u0000\u03af\u03b0\u0006h\r\u0000"+ - "\u03b0\u00dd\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003d,\u0000\u03b2\u03b3"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006i\u0010\u0000\u03b4\u00df\u0001"+ - "\u0000\u0000\u0000\u03b5\u03b6\u0003f-\u0000\u03b6\u03b7\u0001\u0000\u0000"+ - "\u0000\u03b7\u03b8\u0006j\u000f\u0000\u03b8\u00e1\u0001\u0000\u0000\u0000"+ - "\u03b9\u03ba\u0003j/\u0000\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc"+ - "\u0006k\u0012\u0000\u03bc\u00e3\u0001\u0000\u0000\u0000\u03bd\u03be\u0005"+ - "a\u0000\u0000\u03be\u03bf\u0005s\u0000\u0000\u03bf\u00e5\u0001\u0000\u0000"+ - "\u0000\u03c0\u03c1\u0003\u00d4d\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000"+ - "\u03c2\u03c3\u0006m\u0013\u0000\u03c3\u00e7\u0001\u0000\u0000\u0000\u03c4"+ - "\u03c5\u00032\u0013\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7"+ - "\u0006n\t\u0000\u03c7\u00e9\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003"+ - "4\u0014\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006o\t"+ - "\u0000\u03cb\u00eb\u0001\u0000\u0000\u0000\u03cc\u03cd\u00036\u0015\u0000"+ - "\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce\u03cf\u0006p\t\u0000\u03cf"+ - "\u00ed\u0001\u0000\u0000\u0000\u03d0\u03d1\u0003B\u001b\u0000\u03d1\u03d2"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006q\f\u0000\u03d3\u03d4\u0006"+ - "q\r\u0000\u03d4\u00ef\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00a2"+ - "K\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006r\n\u0000"+ - "\u03d8\u03d9\u0006r\u0014\u0000\u03d9\u00f1\u0001\u0000\u0000\u0000\u03da"+ - "\u03db\u0005o\u0000\u0000\u03db\u03dc\u0005n\u0000\u0000\u03dc\u03dd\u0001"+ - "\u0000\u0000\u0000\u03dd\u03de\u0006s\u0015\u0000\u03de\u00f3\u0001\u0000"+ - "\u0000\u0000\u03df\u03e0\u0005w\u0000\u0000\u03e0\u03e1\u0005i\u0000\u0000"+ - "\u03e1\u03e2\u0005t\u0000\u0000\u03e2\u03e3\u0005h\u0000\u0000\u03e3\u03e4"+ - 
"\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006t\u0015\u0000\u03e5\u00f5\u0001"+ - "\u0000\u0000\u0000\u03e6\u03e7\b\f\u0000\u0000\u03e7\u00f7\u0001\u0000"+ - "\u0000\u0000\u03e8\u03ea\u0003\u00f6u\u0000\u03e9\u03e8\u0001\u0000\u0000"+ - "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03e9\u0001\u0000\u0000"+ - "\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0001\u0000\u0000"+ - "\u0000\u03ed\u03ee\u0003\u013c\u0098\u0000\u03ee\u03f0\u0001\u0000\u0000"+ - "\u0000\u03ef\u03e9\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000"+ - "\u0000\u03f0\u03f2\u0001\u0000\u0000\u0000\u03f1\u03f3\u0003\u00f6u\u0000"+ - "\u03f2\u03f1\u0001\u0000\u0000\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000"+ - "\u03f4\u03f2\u0001\u0000\u0000\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000"+ - "\u03f5\u00f9\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003\u00aaO\u0000\u03f7"+ - "\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006w\u0011\u0000\u03f9\u00fb"+ - "\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003\u00f8v\u0000\u03fb\u03fc\u0001"+ - "\u0000\u0000\u0000\u03fc\u03fd\u0006x\u0016\u0000\u03fd\u00fd\u0001\u0000"+ - "\u0000\u0000\u03fe\u03ff\u00032\u0013\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u0401\u0006y\t\u0000\u0401\u00ff\u0001\u0000\u0000\u0000"+ - "\u0402\u0403\u00034\u0014\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404"+ - "\u0405\u0006z\t\u0000\u0405\u0101\u0001\u0000\u0000\u0000\u0406\u0407"+ - "\u00036\u0015\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006"+ - "{\t\u0000\u0409\u0103\u0001\u0000\u0000\u0000\u040a\u040b\u0003B\u001b"+ - "\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006|\f\u0000"+ - "\u040d\u040e\u0006|\r\u0000\u040e\u040f\u0006|\r\u0000\u040f\u0105\u0001"+ - "\u0000\u0000\u0000\u0410\u0411\u0003d,\u0000\u0411\u0412\u0001\u0000\u0000"+ - "\u0000\u0412\u0413\u0006}\u0010\u0000\u0413\u0107\u0001\u0000\u0000\u0000"+ - "\u0414\u0415\u0003f-\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417"+ - 
"\u0006~\u000f\u0000\u0417\u0109\u0001\u0000\u0000\u0000\u0418\u0419\u0003"+ - "j/\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u007f\u0012"+ - "\u0000\u041b\u010b\u0001\u0000\u0000\u0000\u041c\u041d\u0003\u00f4t\u0000"+ - "\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041f\u0006\u0080\u0017\u0000"+ - "\u041f\u010d\u0001\u0000\u0000\u0000\u0420\u0421\u0003\u00d4d\u0000\u0421"+ - "\u0422\u0001\u0000\u0000\u0000\u0422\u0423\u0006\u0081\u0013\u0000\u0423"+ - "\u010f\u0001\u0000\u0000\u0000\u0424\u0425\u0003\u00aaO\u0000\u0425\u0426"+ - "\u0001\u0000\u0000\u0000\u0426\u0427\u0006\u0082\u0011\u0000\u0427\u0111"+ - "\u0001\u0000\u0000\u0000\u0428\u0429\u00032\u0013\u0000\u0429\u042a\u0001"+ - "\u0000\u0000\u0000\u042a\u042b\u0006\u0083\t\u0000\u042b\u0113\u0001\u0000"+ - "\u0000\u0000\u042c\u042d\u00034\u0014\u0000\u042d\u042e\u0001\u0000\u0000"+ - "\u0000\u042e\u042f\u0006\u0084\t\u0000\u042f\u0115\u0001\u0000\u0000\u0000"+ - "\u0430\u0431\u00036\u0015\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432"+ - "\u0433\u0006\u0085\t\u0000\u0433\u0117\u0001\u0000\u0000\u0000\u0434\u0435"+ - "\u0003B\u001b\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ - "\u0086\f\u0000\u0437\u0438\u0006\u0086\r\u0000\u0438\u0119\u0001\u0000"+ - "\u0000\u0000\u0439\u043a\u0003j/\u0000\u043a\u043b\u0001\u0000\u0000\u0000"+ - "\u043b\u043c\u0006\u0087\u0012\u0000\u043c\u011b\u0001\u0000\u0000\u0000"+ - "\u043d\u043e\u0003\u00aaO\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f"+ - "\u0440\u0006\u0088\u0011\u0000\u0440\u011d\u0001\u0000\u0000\u0000\u0441"+ - "\u0442\u0003\u00a6M\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444"+ - "\u0006\u0089\u0018\u0000\u0444\u011f\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u00032\u0013\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u008a\t\u0000\u0448\u0121\u0001\u0000\u0000\u0000\u0449\u044a\u00034"+ - "\u0014\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u008b"+ - 
"\t\u0000\u044c\u0123\u0001\u0000\u0000\u0000\u044d\u044e\u00036\u0015"+ - "\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450\u0006\u008c\t\u0000"+ - "\u0450\u0125\u0001\u0000\u0000\u0000\u0451\u0452\u0003B\u001b\u0000\u0452"+ - "\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u008d\f\u0000\u0454\u0455"+ - "\u0006\u008d\r\u0000\u0455\u0127\u0001\u0000\u0000\u0000\u0456\u0457\u0005"+ - "i\u0000\u0000\u0457\u0458\u0005n\u0000\u0000\u0458\u0459\u0005f\u0000"+ - "\u0000\u0459\u045a\u0005o\u0000\u0000\u045a\u0129\u0001\u0000\u0000\u0000"+ - "\u045b\u045c\u00032\u0013\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d"+ - "\u045e\u0006\u008f\t\u0000\u045e\u012b\u0001\u0000\u0000\u0000\u045f\u0460"+ - "\u00034\u0014\u0000\u0460\u0461\u0001\u0000\u0000\u0000\u0461\u0462\u0006"+ - "\u0090\t\u0000\u0462\u012d\u0001\u0000\u0000\u0000\u0463\u0464\u00036"+ - "\u0015\u0000\u0464\u0465\u0001\u0000\u0000\u0000\u0465\u0466\u0006\u0091"+ - "\t\u0000\u0466\u012f\u0001\u0000\u0000\u0000\u0467\u0468\u0003B\u001b"+ - "\u0000\u0468\u0469\u0001\u0000\u0000\u0000\u0469\u046a\u0006\u0092\f\u0000"+ - "\u046a\u046b\u0006\u0092\r\u0000\u046b\u0131\u0001\u0000\u0000\u0000\u046c"+ - "\u046d\u0005f\u0000\u0000\u046d\u046e\u0005u\u0000\u0000\u046e\u046f\u0005"+ - "n\u0000\u0000\u046f\u0470\u0005c\u0000\u0000\u0470\u0471\u0005t\u0000"+ - "\u0000\u0471\u0472\u0005i\u0000\u0000\u0472\u0473\u0005o\u0000\u0000\u0473"+ - "\u0474\u0005n\u0000\u0000\u0474\u0475\u0005s\u0000\u0000\u0475\u0133\u0001"+ - "\u0000\u0000\u0000\u0476\u0477\u00032\u0013\u0000\u0477\u0478\u0001\u0000"+ - "\u0000\u0000\u0478\u0479\u0006\u0094\t\u0000\u0479\u0135\u0001\u0000\u0000"+ - "\u0000\u047a\u047b\u00034\u0014\u0000\u047b\u047c\u0001\u0000\u0000\u0000"+ - "\u047c\u047d\u0006\u0095\t\u0000\u047d\u0137\u0001\u0000\u0000\u0000\u047e"+ - "\u047f\u00036\u0015\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481"+ - "\u0006\u0096\t\u0000\u0481\u0139\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ - 
"\u00a4L\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u0097"+ - "\u000e\u0000\u0485\u0486\u0006\u0097\r\u0000\u0486\u013b\u0001\u0000\u0000"+ - "\u0000\u0487\u0488\u0005:\u0000\u0000\u0488\u013d\u0001\u0000\u0000\u0000"+ - "\u0489\u048f\u0003N!\u0000\u048a\u048f\u0003D\u001c\u0000\u048b\u048f"+ - "\u0003j/\u0000\u048c\u048f\u0003F\u001d\u0000\u048d\u048f\u0003T$\u0000"+ - "\u048e\u0489\u0001\u0000\u0000\u0000\u048e\u048a\u0001\u0000\u0000\u0000"+ - "\u048e\u048b\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000"+ - "\u048e\u048d\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000"+ - "\u0490\u048e\u0001\u0000\u0000\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ - "\u0491\u013f\u0001\u0000\u0000\u0000\u0492\u0493\u00032\u0013\u0000\u0493"+ - "\u0494\u0001\u0000\u0000\u0000\u0494\u0495\u0006\u009a\t\u0000\u0495\u0141"+ - "\u0001\u0000\u0000\u0000\u0496\u0497\u00034\u0014\u0000\u0497\u0498\u0001"+ - "\u0000\u0000\u0000\u0498\u0499\u0006\u009b\t\u0000\u0499\u0143\u0001\u0000"+ - "\u0000\u0000\u049a\u049b\u00036\u0015\u0000\u049b\u049c\u0001\u0000\u0000"+ - "\u0000\u049c\u049d\u0006\u009c\t\u0000\u049d\u0145\u0001\u0000\u0000\u0000"+ - ":\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01df\u01e9"+ - "\u01ed\u01f0\u01f9\u01fb\u0206\u022f\u0234\u023d\u0244\u0249\u024b\u0256"+ - "\u025e\u0261\u0263\u0268\u026d\u0273\u027a\u027f\u0285\u0288\u0290\u0294"+ - "\u0316\u031b\u0320\u0322\u0328\u035b\u0360\u0383\u0387\u038c\u0391\u0396"+ - "\u0398\u039c\u039e\u03eb\u03ef\u03f4\u048e\u0490\u0019\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007@\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007A\u0000\u0007\"\u0000\u0007!\u0000\u0007C\u0000\u0007$\u0000\u0007"+ - "L\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007V\u0000\u0007U\u0000"+ - "\u0007B\u0000"; + 
"\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049f\b\u009b"+ + "\u000b\u009b\f\u009b\u04a0\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002"+ + "\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c"+ + "\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u0014"+ + "4\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000"+ + "H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c"+ + "\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ + "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ + ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8\u0000\u00aaC"+ + "\u00acD\u00aeE\u00b0F\u00b2\u0000\u00b4\u0000\u00b6\u0000\u00b8\u0000"+ + "\u00ba\u0000\u00bc\u0000\u00beG\u00c0H\u00c2\u0000\u00c4I\u00c6\u0000"+ + "\u00c8J\u00caK\u00ccL\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000"+ + "\u00d6\u0000\u00d8M\u00daN\u00dcO\u00deP\u00e0\u0000\u00e2\u0000\u00e4"+ + "\u0000\u00e6\u0000\u00e8Q\u00ea\u0000\u00ecR\u00eeS\u00f0T\u00f2\u0000"+ + "\u00f4\u0000\u00f6U\u00f8V\u00fa\u0000\u00fcW\u00fe\u0000\u0100\u0000"+ + "\u0102X\u0104Y\u0106Z\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000"+ + "\u0110\u0000\u0112\u0000\u0114\u0000\u0116[\u0118\\\u011a]\u011c\u0000"+ + 
"\u011e\u0000\u0120\u0000\u0122\u0000\u0124^\u0126_\u0128`\u012a\u0000"+ + "\u012ca\u012eb\u0130c\u0132d\u0134\u0000\u0136e\u0138f\u013ag\u013ch\u013e"+ + "\u0000\u0140i\u0142j\u0144k\u0146l\u0148m\f\u0000\u0001\u0002\u0003\u0004"+ + "\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000"+ + "\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000"+ + "\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000"+ + "++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b"+ + "\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04c9\u0000\f\u0001\u0000\u0000\u0000"+ + "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000"+ + "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000"+ + "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000"+ + "\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000"+ + "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000"+ + "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001"+ + "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000"+ + "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0000"+ + "0\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001"+ + "\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000"+ + "\u0000\u0001:\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001"+ + ">\u0001\u0000\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001"+ + "\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000"+ + "\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000"+ + "\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d"+ + "\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000"+ + "\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000"+ + 
"\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r"+ + "\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000"+ + "\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000"+ + "\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080"+ + "\u0001\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084"+ + "\u0001\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088"+ + "\u0001\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c"+ + "\u0001\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090"+ + "\u0001\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094"+ + "\u0001\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098"+ + "\u0001\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c"+ + "\u0001\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0"+ + "\u0001\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4"+ + "\u0001\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00aa"+ + "\u0001\u0000\u0000\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae"+ + "\u0001\u0000\u0000\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0003\u00b2"+ + "\u0001\u0000\u0000\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6"+ + "\u0001\u0000\u0000\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba"+ + "\u0001\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be"+ + "\u0001\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c4"+ + "\u0001\u0000\u0000\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8"+ + "\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc"+ + "\u0001\u0000\u0000\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0"+ + "\u0001\u0000\u0000\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8"+ + "\u0001\u0000\u0000\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc"+ + 
"\u0001\u0000\u0000\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0"+ + "\u0001\u0000\u0000\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4"+ + "\u0001\u0000\u0000\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8"+ + "\u0001\u0000\u0000\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec"+ + "\u0001\u0000\u0000\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0"+ + "\u0001\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4"+ + "\u0001\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8"+ + "\u0001\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe"+ + "\u0001\u0000\u0000\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102"+ + "\u0001\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106"+ + "\u0001\u0000\u0000\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a"+ + "\u0001\u0000\u0000\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e"+ + "\u0001\u0000\u0000\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112"+ + "\u0001\u0000\u0000\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116"+ + "\u0001\u0000\u0000\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a"+ + "\u0001\u0000\u0000\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001"+ + "\u0000\u0000\u0000\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000"+ + "\u0000\u0000\b\u0124\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000"+ + "\u0000\b\u0128\u0001\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000"+ + "\t\u012c\u0001\u0000\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130"+ + "\u0001\u0000\u0000\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001"+ + "\u0000\u0000\u0000\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000"+ + "\u0000\u0000\n\u013a\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000"+ + "\u0000\u000b\u013e\u0001\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000"+ + "\u0000\u000b\u0142\u0001\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000"+ + 
"\u0000\u000b\u0146\u0001\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000"+ + "\u0000\f\u014a\u0001\u0000\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000"+ + "\u0010\u015b\u0001\u0000\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000"+ + "\u0014\u016b\u0001\u0000\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000"+ + "\u0018\u017c\u0001\u0000\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000"+ + "\u001c\u0191\u0001\u0000\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000"+ + " \u01a0\u0001\u0000\u0000\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3"+ + "\u0001\u0000\u0000\u0000&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000"+ + "\u0000\u0000*\u01c9\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000"+ + ".\u01d8\u0001\u0000\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7"+ + "\u0001\u0000\u0000\u00004\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000"+ + "\u0000\u00008\u020e\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000"+ + "<\u0218\u0001\u0000\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220"+ + "\u0001\u0000\u0000\u0000B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000"+ + "\u0000\u0000F\u022a\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000"+ + "J\u022f\u0001\u0000\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a"+ + "\u0001\u0000\u0000\u0000P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000"+ + "\u0000\u0000T\u0243\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000"+ + "X\u0267\u0001\u0000\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298"+ + "\u0001\u0000\u0000\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000"+ + "\u0000\u0000b\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000"+ + "f\u02a7\u0001\u0000\u0000\u0000h\u02a9\u0001\u0000\u0000\u0000j\u02ae"+ + "\u0001\u0000\u0000\u0000l\u02b0\u0001\u0000\u0000\u0000n\u02b6\u0001\u0000"+ + "\u0000\u0000p\u02bc\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ + "t\u02c3\u0001\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9"+ + 
"\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d2\u0001\u0000"+ + "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dd\u0001\u0000\u0000"+ + "\u0000\u0082\u02e0\u0001\u0000\u0000\u0000\u0084\u02e2\u0001\u0000\u0000"+ + "\u0000\u0086\u02e8\u0001\u0000\u0000\u0000\u0088\u02ea\u0001\u0000\u0000"+ + "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000"+ + "\u0000\u008e\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000"+ + "\u0000\u0092\u02fa\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000"+ + "\u0000\u0096\u02ff\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000"+ + "\u0000\u009a\u0304\u0001\u0000\u0000\u0000\u009c\u0306\u0001\u0000\u0000"+ + "\u0000\u009e\u0308\u0001\u0000\u0000\u0000\u00a0\u030a\u0001\u0000\u0000"+ + "\u0000\u00a2\u030c\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ + "\u0000\u00a6\u0326\u0001\u0000\u0000\u0000\u00a8\u0328\u0001\u0000\u0000"+ + "\u0000\u00aa\u0330\u0001\u0000\u0000\u0000\u00ac\u0332\u0001\u0000\u0000"+ + "\u0000\u00ae\u0336\u0001\u0000\u0000\u0000\u00b0\u033a\u0001\u0000\u0000"+ + "\u0000\u00b2\u033e\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ + "\u0000\u00b6\u0347\u0001\u0000\u0000\u0000\u00b8\u034b\u0001\u0000\u0000"+ + "\u0000\u00ba\u034f\u0001\u0000\u0000\u0000\u00bc\u0353\u0001\u0000\u0000"+ + "\u0000\u00be\u0357\u0001\u0000\u0000\u0000\u00c0\u035f\u0001\u0000\u0000"+ + "\u0000\u00c2\u036b\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000"+ + "\u0000\u00c6\u0372\u0001\u0000\u0000\u0000\u00c8\u0376\u0001\u0000\u0000"+ + "\u0000\u00ca\u037a\u0001\u0000\u0000\u0000\u00cc\u037e\u0001\u0000\u0000"+ + "\u0000\u00ce\u0382\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ + "\u0000\u00d2\u038b\u0001\u0000\u0000\u0000\u00d4\u0393\u0001\u0000\u0000"+ + "\u0000\u00d6\u03a8\u0001\u0000\u0000\u0000\u00d8\u03ac\u0001\u0000\u0000"+ + "\u0000\u00da\u03b0\u0001\u0000\u0000\u0000\u00dc\u03b4\u0001\u0000\u0000"+ + 
"\u0000\u00de\u03b8\u0001\u0000\u0000\u0000\u00e0\u03bc\u0001\u0000\u0000"+ + "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c5\u0001\u0000\u0000"+ + "\u0000\u00e6\u03c9\u0001\u0000\u0000\u0000\u00e8\u03cd\u0001\u0000\u0000"+ + "\u0000\u00ea\u03d0\u0001\u0000\u0000\u0000\u00ec\u03d4\u0001\u0000\u0000"+ + "\u0000\u00ee\u03d8\u0001\u0000\u0000\u0000\u00f0\u03dc\u0001\u0000\u0000"+ + "\u0000\u00f2\u03e0\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ + "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ + "\u0000\u00fa\u03f6\u0001\u0000\u0000\u0000\u00fc\u03ff\u0001\u0000\u0000"+ + "\u0000\u00fe\u0406\u0001\u0000\u0000\u0000\u0100\u040a\u0001\u0000\u0000"+ + "\u0000\u0102\u040e\u0001\u0000\u0000\u0000\u0104\u0412\u0001\u0000\u0000"+ + "\u0000\u0106\u0416\u0001\u0000\u0000\u0000\u0108\u041a\u0001\u0000\u0000"+ + "\u0000\u010a\u0420\u0001\u0000\u0000\u0000\u010c\u0424\u0001\u0000\u0000"+ + "\u0000\u010e\u0428\u0001\u0000\u0000\u0000\u0110\u042c\u0001\u0000\u0000"+ + "\u0000\u0112\u0430\u0001\u0000\u0000\u0000\u0114\u0434\u0001\u0000\u0000"+ + "\u0000\u0116\u0438\u0001\u0000\u0000\u0000\u0118\u043c\u0001\u0000\u0000"+ + "\u0000\u011a\u0440\u0001\u0000\u0000\u0000\u011c\u0444\u0001\u0000\u0000"+ + "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044d\u0001\u0000\u0000"+ + "\u0000\u0122\u0451\u0001\u0000\u0000\u0000\u0124\u0455\u0001\u0000\u0000"+ + "\u0000\u0126\u0459\u0001\u0000\u0000\u0000\u0128\u045d\u0001\u0000\u0000"+ + "\u0000\u012a\u0461\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ + "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u046f\u0001\u0000\u0000"+ + "\u0000\u0132\u0473\u0001\u0000\u0000\u0000\u0134\u0477\u0001\u0000\u0000"+ + "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0486\u0001\u0000\u0000"+ + "\u0000\u013a\u048a\u0001\u0000\u0000\u0000\u013c\u048e\u0001\u0000\u0000"+ + "\u0000\u013e\u0492\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ + 
"\u0000\u0142\u049e\u0001\u0000\u0000\u0000\u0144\u04a2\u0001\u0000\u0000"+ + "\u0000\u0146\u04a6\u0001\u0000\u0000\u0000\u0148\u04aa\u0001\u0000\u0000"+ + "\u0000\u014a\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c"+ + "\u014d\u0005s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005"+ + "e\u0000\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000"+ + "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000"+ + "\u0000\u0153\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000"+ + "\u0155\u0156\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158"+ + "\u0005p\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006"+ + "\u0001\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005"+ + "e\u0000\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000"+ + "\u0000\u015e\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160"+ + "\u0161\u0005h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163"+ + "\u0006\u0002\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165"+ + "\u0005e\u0000\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a"+ + "\u0000\u0000\u0167\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000"+ + "\u0000\u0169\u016a\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000"+ + "\u0000\u016b\u016c\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d"+ + "\u016e\u0005p\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005"+ + "a\u0000\u0000\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000"+ + "\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003"+ + "\u0000\u0174\u0015\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000"+ + "\u0176\u0177\u0005r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179"+ + "\u0005m\u0000\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006"+ + "\u0005\u0004\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005"+ + 
"g\u0000\u0000\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000"+ + "\u0000\u017f\u0180\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000"+ + "\u0181\u0182\u0006\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000"+ + "\u0183\u0184\u0005i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186"+ + "\u0005l\u0000\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n"+ + "\u0000\u0000\u0188\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000"+ + "\u018a\u018b\u0005t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d"+ + "\u0005t\u0000\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000"+ + "\u0000\u0000\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000"+ + "\u0000\u0000\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000"+ + "\u0193\u0194\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196"+ + "\u0001\u0000\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001"+ + "\u0000\u0000\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000"+ + "\u0000\u019a\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c"+ + "\u019d\u0005t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f"+ + "\u0006\t\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005"+ + "m\u0000\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000"+ + "\u0000\u01a3\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a6\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7"+ + "\u01a8\u0005m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005"+ + "_\u0000\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000"+ + "\u0000\u01ac\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae"+ + "\u01af\u0005n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001"+ + "\u0000\u0000\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000"+ + "\u0000\u0000\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000"+ + 
"\u01b5\u01b6\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8"+ + "\u0005m\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000"+ + "\u0000\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000"+ + "\u0000\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be"+ + "\u01bf\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1"+ + "\u0006\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005"+ + "s\u0000\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000"+ + "\u0000\u01c5\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000"+ + "\u01c7\u01c8\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9"+ + "\u01ca\u0005s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005"+ + "r\u0000\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ + "\u0000\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000"+ + "\u01d0\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3"+ + "\u0005a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s"+ + "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010"+ + "\u0000\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000"+ + "\u01d9\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc"+ + "\u0005r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000"+ + "\u0000\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000"+ + "\u0000\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000"+ + "\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000"+ + "\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000"+ + "\u01e5\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7"+ + "\u01e8\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001"+ + "\u0000\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000"+ + 
"\u0000\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000"+ + "\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000"+ + "\u0000\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000"+ + "\u0000\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000"+ + "\u0000\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000"+ + "\u01f4\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ + "\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f7"+ + "3\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ + "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ + "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ + "\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ + "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ + "\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0003\u00a2K\u0000\u020f\u0210\u0001\u0000\u0000"+ + "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ + "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ + "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ + "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ + "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ + 
"\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ + "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ + "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ + "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ + "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ + "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ + "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ + "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ + "\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ + "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ + "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ + "M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ + "\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ + "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ + "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ + "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ + "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ + "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ + "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ + "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ + 
"J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ + "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ + "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ + "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ + "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ + "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ + "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ + "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ + "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ + "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ + "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ + "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ + "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ + "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ + "\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ + "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ + "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ + "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ + "\u0000\u0000\u0273\u0277\u0003j/\u0000\u0274\u0276\u0003D\u001c\u0000"+ + "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ + "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ + 
"\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ + "\u027a\u027c\u0003j/\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ + "\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ + "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ + "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ + "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ + "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ + "\u0000\u0000\u0000\u0285\u0289\u0003j/\u0000\u0286\u0288\u0003D\u001c"+ + "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ + "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ + "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ + "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ + "\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ + "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003j/\u0000\u0291\u0293\u0003"+ + "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ + "\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ + "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ + "\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ + "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ + "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ + "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ + "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ + "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ + "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ + "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ + 
"\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005,\u0000\u0000\u02a8"+ + "g\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005d\u0000\u0000\u02aa\u02ab\u0005"+ + "e\u0000\u0000\u02ab\u02ac\u0005s\u0000\u0000\u02ac\u02ad\u0005c\u0000"+ + "\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005.\u0000\u0000\u02af"+ + "k\u0001\u0000\u0000\u0000\u02b0\u02b1\u0005f\u0000\u0000\u02b1\u02b2\u0005"+ + "a\u0000\u0000\u02b2\u02b3\u0005l\u0000\u0000\u02b3\u02b4\u0005s\u0000"+ + "\u0000\u02b4\u02b5\u0005e\u0000\u0000\u02b5m\u0001\u0000\u0000\u0000\u02b6"+ + "\u02b7\u0005f\u0000\u0000\u02b7\u02b8\u0005i\u0000\u0000\u02b8\u02b9\u0005"+ + "r\u0000\u0000\u02b9\u02ba\u0005s\u0000\u0000\u02ba\u02bb\u0005t\u0000"+ + "\u0000\u02bbo\u0001\u0000\u0000\u0000\u02bc\u02bd\u0005l\u0000\u0000\u02bd"+ + "\u02be\u0005a\u0000\u0000\u02be\u02bf\u0005s\u0000\u0000\u02bf\u02c0\u0005"+ + "t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005(\u0000"+ + "\u0000\u02c2s\u0001\u0000\u0000\u0000\u02c3\u02c4\u0005i\u0000\u0000\u02c4"+ + "\u02c5\u0005n\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ + "i\u0000\u0000\u02c7\u02c8\u0005s\u0000\u0000\u02c8w\u0001\u0000\u0000"+ + "\u0000\u02c9\u02ca\u0005l\u0000\u0000\u02ca\u02cb\u0005i\u0000\u0000\u02cb"+ + "\u02cc\u0005k\u0000\u0000\u02cc\u02cd\u0005e\u0000\u0000\u02cdy\u0001"+ + "\u0000\u0000\u0000\u02ce\u02cf\u0005n\u0000\u0000\u02cf\u02d0\u0005o\u0000"+ + "\u0000\u02d0\u02d1\u0005t\u0000\u0000\u02d1{\u0001\u0000\u0000\u0000\u02d2"+ + "\u02d3\u0005n\u0000\u0000\u02d3\u02d4\u0005u\u0000\u0000\u02d4\u02d5\u0005"+ + "l\u0000\u0000\u02d5\u02d6\u0005l\u0000\u0000\u02d6}\u0001\u0000\u0000"+ + "\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005u\u0000\u0000\u02d9"+ + "\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000\u0000\u02db\u02dc\u0005"+ + "s\u0000\u0000\u02dc\u007f\u0001\u0000\u0000\u0000\u02dd\u02de\u0005o\u0000"+ + "\u0000\u02de\u02df\u0005r\u0000\u0000\u02df\u0081\u0001\u0000\u0000\u0000"+ + 
"\u02e0\u02e1\u0005?\u0000\u0000\u02e1\u0083\u0001\u0000\u0000\u0000\u02e2"+ + "\u02e3\u0005r\u0000\u0000\u02e3\u02e4\u0005l\u0000\u0000\u02e4\u02e5\u0005"+ + "i\u0000\u0000\u02e5\u02e6\u0005k\u0000\u0000\u02e6\u02e7\u0005e\u0000"+ + "\u0000\u02e7\u0085\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005)\u0000\u0000"+ + "\u02e9\u0087\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005t\u0000\u0000\u02eb"+ + "\u02ec\u0005r\u0000\u0000\u02ec\u02ed\u0005u\u0000\u0000\u02ed\u02ee\u0005"+ + "e\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005=\u0000"+ + "\u0000\u02f0\u02f1\u0005=\u0000\u0000\u02f1\u008b\u0001\u0000\u0000\u0000"+ + "\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005~\u0000\u0000\u02f4\u008d"+ + "\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005!\u0000\u0000\u02f6\u02f7\u0005"+ + "=\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005<\u0000"+ + "\u0000\u02f9\u0091\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005<\u0000\u0000"+ + "\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000\u02fd"+ + "\u02fe\u0005>\u0000\u0000\u02fe\u0095\u0001\u0000\u0000\u0000\u02ff\u0300"+ + "\u0005>\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301\u0097\u0001\u0000"+ + "\u0000\u0000\u0302\u0303\u0005+\u0000\u0000\u0303\u0099\u0001\u0000\u0000"+ + "\u0000\u0304\u0305\u0005-\u0000\u0000\u0305\u009b\u0001\u0000\u0000\u0000"+ + "\u0306\u0307\u0005*\u0000\u0000\u0307\u009d\u0001\u0000\u0000\u0000\u0308"+ + "\u0309\u0005/\u0000\u0000\u0309\u009f\u0001\u0000\u0000\u0000\u030a\u030b"+ + "\u0005%\u0000\u0000\u030b\u00a1\u0001\u0000\u0000\u0000\u030c\u030d\u0005"+ + "[\u0000\u0000\u030d\u030e\u0001\u0000\u0000\u0000\u030e\u030f\u0006K\u0000"+ + "\u0000\u030f\u0310\u0006K\u0000\u0000\u0310\u00a3\u0001\u0000\u0000\u0000"+ + "\u0311\u0312\u0005]\u0000\u0000\u0312\u0313\u0001\u0000\u0000\u0000\u0313"+ + "\u0314\u0006L\r\u0000\u0314\u0315\u0006L\r\u0000\u0315\u00a5\u0001\u0000"+ + "\u0000\u0000\u0316\u031a\u0003F\u001d\u0000\u0317\u0319\u0003V%\u0000"+ + 
"\u0318\u0317\u0001\u0000\u0000\u0000\u0319\u031c\u0001\u0000\u0000\u0000"+ + "\u031a\u0318\u0001\u0000\u0000\u0000\u031a\u031b\u0001\u0000\u0000\u0000"+ + "\u031b\u0327\u0001\u0000\u0000\u0000\u031c\u031a\u0001\u0000\u0000\u0000"+ + "\u031d\u0320\u0003T$\u0000\u031e\u0320\u0003N!\u0000\u031f\u031d\u0001"+ + "\u0000\u0000\u0000\u031f\u031e\u0001\u0000\u0000\u0000\u0320\u0322\u0001"+ + "\u0000\u0000\u0000\u0321\u0323\u0003V%\u0000\u0322\u0321\u0001\u0000\u0000"+ + "\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0322\u0001\u0000\u0000"+ + "\u0000\u0324\u0325\u0001\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000"+ + "\u0000\u0326\u0316\u0001\u0000\u0000\u0000\u0326\u031f\u0001\u0000\u0000"+ + "\u0000\u0327\u00a7\u0001\u0000\u0000\u0000\u0328\u032a\u0003P\"\u0000"+ + "\u0329\u032b\u0003R#\u0000\u032a\u0329\u0001\u0000\u0000\u0000\u032b\u032c"+ + "\u0001\u0000\u0000\u0000\u032c\u032a\u0001\u0000\u0000\u0000\u032c\u032d"+ + "\u0001\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e\u032f"+ + "\u0003P\"\u0000\u032f\u00a9\u0001\u0000\u0000\u0000\u0330\u0331\u0003"+ + "\u00a8N\u0000\u0331\u00ab\u0001\u0000\u0000\u0000\u0332\u0333\u00032\u0013"+ + "\u0000\u0333\u0334\u0001\u0000\u0000\u0000\u0334\u0335\u0006P\t\u0000"+ + "\u0335\u00ad\u0001\u0000\u0000\u0000\u0336\u0337\u00034\u0014\u0000\u0337"+ + "\u0338\u0001\u0000\u0000\u0000\u0338\u0339\u0006Q\t\u0000\u0339\u00af"+ + "\u0001\u0000\u0000\u0000\u033a\u033b\u00036\u0015\u0000\u033b\u033c\u0001"+ + "\u0000\u0000\u0000\u033c\u033d\u0006R\t\u0000\u033d\u00b1\u0001\u0000"+ + "\u0000\u0000\u033e\u033f\u0003B\u001b\u0000\u033f\u0340\u0001\u0000\u0000"+ + "\u0000\u0340\u0341\u0006S\f\u0000\u0341\u0342\u0006S\r\u0000\u0342\u00b3"+ + "\u0001\u0000\u0000\u0000\u0343\u0344\u0003\u00a2K\u0000\u0344\u0345\u0001"+ + "\u0000\u0000\u0000\u0345\u0346\u0006T\n\u0000\u0346\u00b5\u0001\u0000"+ + "\u0000\u0000\u0347\u0348\u0003\u00a4L\u0000\u0348\u0349\u0001\u0000\u0000"+ + 
"\u0000\u0349\u034a\u0006U\u000e\u0000\u034a\u00b7\u0001\u0000\u0000\u0000"+ + "\u034b\u034c\u0003f-\u0000\u034c\u034d\u0001\u0000\u0000\u0000\u034d\u034e"+ + "\u0006V\u000f\u0000\u034e\u00b9\u0001\u0000\u0000\u0000\u034f\u0350\u0003"+ + "d,\u0000\u0350\u0351\u0001\u0000\u0000\u0000\u0351\u0352\u0006W\u0010"+ + "\u0000\u0352\u00bb\u0001\u0000\u0000\u0000\u0353\u0354\u0003X&\u0000\u0354"+ + "\u0355\u0001\u0000\u0000\u0000\u0355\u0356\u0006X\u0011\u0000\u0356\u00bd"+ + "\u0001\u0000\u0000\u0000\u0357\u0358\u0005o\u0000\u0000\u0358\u0359\u0005"+ + "p\u0000\u0000\u0359\u035a\u0005t\u0000\u0000\u035a\u035b\u0005i\u0000"+ + "\u0000\u035b\u035c\u0005o\u0000\u0000\u035c\u035d\u0005n\u0000\u0000\u035d"+ + "\u035e\u0005s\u0000\u0000\u035e\u00bf\u0001\u0000\u0000\u0000\u035f\u0360"+ + "\u0005m\u0000\u0000\u0360\u0361\u0005e\u0000\u0000\u0361\u0362\u0005t"+ + "\u0000\u0000\u0362\u0363\u0005a\u0000\u0000\u0363\u0364\u0005d\u0000\u0000"+ + "\u0364\u0365\u0005a\u0000\u0000\u0365\u0366\u0005t\u0000\u0000\u0366\u0367"+ + "\u0005a\u0000\u0000\u0367\u00c1\u0001\u0000\u0000\u0000\u0368\u036c\b"+ + "\n\u0000\u0000\u0369\u036a\u0005/\u0000\u0000\u036a\u036c\b\u000b\u0000"+ + "\u0000\u036b\u0368\u0001\u0000\u0000\u0000\u036b\u0369\u0001\u0000\u0000"+ + "\u0000\u036c\u00c3\u0001\u0000\u0000\u0000\u036d\u036f\u0003\u00c2[\u0000"+ + "\u036e\u036d\u0001\u0000\u0000\u0000\u036f\u0370\u0001\u0000\u0000\u0000"+ + "\u0370\u036e\u0001\u0000\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000"+ + "\u0371\u00c5\u0001\u0000\u0000\u0000\u0372\u0373\u0003\u00aaO\u0000\u0373"+ + "\u0374\u0001\u0000\u0000\u0000\u0374\u0375\u0006]\u0012\u0000\u0375\u00c7"+ + "\u0001\u0000\u0000\u0000\u0376\u0377\u00032\u0013\u0000\u0377\u0378\u0001"+ + "\u0000\u0000\u0000\u0378\u0379\u0006^\t\u0000\u0379\u00c9\u0001\u0000"+ + "\u0000\u0000\u037a\u037b\u00034\u0014\u0000\u037b\u037c\u0001\u0000\u0000"+ + "\u0000\u037c\u037d\u0006_\t\u0000\u037d\u00cb\u0001\u0000\u0000\u0000"+ + 
"\u037e\u037f\u00036\u0015\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380"+ + "\u0381\u0006`\t\u0000\u0381\u00cd\u0001\u0000\u0000\u0000\u0382\u0383"+ + "\u0003B\u001b\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0385\u0006"+ + "a\f\u0000\u0385\u0386\u0006a\r\u0000\u0386\u00cf\u0001\u0000\u0000\u0000"+ + "\u0387\u0388\u0003j/\u0000\u0388\u0389\u0001\u0000\u0000\u0000\u0389\u038a"+ + "\u0006b\u0013\u0000\u038a\u00d1\u0001\u0000\u0000\u0000\u038b\u038c\u0003"+ + "f-\u0000\u038c\u038d\u0001\u0000\u0000\u0000\u038d\u038e\u0006c\u000f"+ + "\u0000\u038e\u00d3\u0001\u0000\u0000\u0000\u038f\u0394\u0003F\u001d\u0000"+ + "\u0390\u0394\u0003D\u001c\u0000\u0391\u0394\u0003T$\u0000\u0392\u0394"+ + "\u0003\u009cH\u0000\u0393\u038f\u0001\u0000\u0000\u0000\u0393\u0390\u0001"+ + "\u0000\u0000\u0000\u0393\u0391\u0001\u0000\u0000\u0000\u0393\u0392\u0001"+ + "\u0000\u0000\u0000\u0394\u00d5\u0001\u0000\u0000\u0000\u0395\u0398\u0003"+ + "F\u001d\u0000\u0396\u0398\u0003\u009cH\u0000\u0397\u0395\u0001\u0000\u0000"+ + "\u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398\u039c\u0001\u0000\u0000"+ + "\u0000\u0399\u039b\u0003\u00d4d\u0000\u039a\u0399\u0001\u0000\u0000\u0000"+ + "\u039b\u039e\u0001\u0000\u0000\u0000\u039c\u039a\u0001\u0000\u0000\u0000"+ + "\u039c\u039d\u0001\u0000\u0000\u0000\u039d\u03a9\u0001\u0000\u0000\u0000"+ + "\u039e\u039c\u0001\u0000\u0000\u0000\u039f\u03a2\u0003T$\u0000\u03a0\u03a2"+ + "\u0003N!\u0000\u03a1\u039f\u0001\u0000\u0000\u0000\u03a1\u03a0\u0001\u0000"+ + "\u0000\u0000\u03a2\u03a4\u0001\u0000\u0000\u0000\u03a3\u03a5\u0003\u00d4"+ + "d\u0000\u03a4\u03a3\u0001\u0000\u0000\u0000\u03a5\u03a6\u0001\u0000\u0000"+ + "\u0000\u03a6\u03a4\u0001\u0000\u0000\u0000\u03a6\u03a7\u0001\u0000\u0000"+ + "\u0000\u03a7\u03a9\u0001\u0000\u0000\u0000\u03a8\u0397\u0001\u0000\u0000"+ + "\u0000\u03a8\u03a1\u0001\u0000\u0000\u0000\u03a9\u00d7\u0001\u0000\u0000"+ + "\u0000\u03aa\u03ad\u0003\u00d6e\u0000\u03ab\u03ad\u0003\u00a8N\u0000\u03ac"+ + 
"\u03aa\u0001\u0000\u0000\u0000\u03ac\u03ab\u0001\u0000\u0000\u0000\u03ad"+ + "\u03ae\u0001\u0000\u0000\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae"+ + "\u03af\u0001\u0000\u0000\u0000\u03af\u00d9\u0001\u0000\u0000\u0000\u03b0"+ + "\u03b1\u00032\u0013\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3"+ + "\u0006g\t\u0000\u03b3\u00db\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003"+ + "4\u0014\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006h\t"+ + "\u0000\u03b7\u00dd\u0001\u0000\u0000\u0000\u03b8\u03b9\u00036\u0015\u0000"+ + "\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006i\t\u0000\u03bb"+ + "\u00df\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003B\u001b\u0000\u03bd\u03be"+ + "\u0001\u0000\u0000\u0000\u03be\u03bf\u0006j\f\u0000\u03bf\u03c0\u0006"+ + "j\r\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2\u0003d,\u0000"+ + "\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006k\u0010\u0000\u03c4"+ + "\u00e3\u0001\u0000\u0000\u0000\u03c5\u03c6\u0003f-\u0000\u03c6\u03c7\u0001"+ + "\u0000\u0000\u0000\u03c7\u03c8\u0006l\u000f\u0000\u03c8\u00e5\u0001\u0000"+ + "\u0000\u0000\u03c9\u03ca\u0003j/\u0000\u03ca\u03cb\u0001\u0000\u0000\u0000"+ + "\u03cb\u03cc\u0006m\u0013\u0000\u03cc\u00e7\u0001\u0000\u0000\u0000\u03cd"+ + "\u03ce\u0005a\u0000\u0000\u03ce\u03cf\u0005s\u0000\u0000\u03cf\u00e9\u0001"+ + "\u0000\u0000\u0000\u03d0\u03d1\u0003\u00d8f\u0000\u03d1\u03d2\u0001\u0000"+ + "\u0000\u0000\u03d2\u03d3\u0006o\u0014\u0000\u03d3\u00eb\u0001\u0000\u0000"+ + "\u0000\u03d4\u03d5\u00032\u0013\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d7\u0006p\t\u0000\u03d7\u00ed\u0001\u0000\u0000\u0000\u03d8"+ + "\u03d9\u00034\u0014\u0000\u03d9\u03da\u0001\u0000\u0000\u0000\u03da\u03db"+ + "\u0006q\t\u0000\u03db\u00ef\u0001\u0000\u0000\u0000\u03dc\u03dd\u0003"+ + "6\u0015\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03df\u0006r\t"+ + "\u0000\u03df\u00f1\u0001\u0000\u0000\u0000\u03e0\u03e1\u0003B\u001b\u0000"+ + 
"\u03e1\u03e2\u0001\u0000\u0000\u0000\u03e2\u03e3\u0006s\f\u0000\u03e3"+ + "\u03e4\u0006s\r\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000\u03e5\u03e6"+ + "\u0003\u00a2K\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006"+ + "t\n\u0000\u03e8\u03e9\u0006t\u0015\u0000\u03e9\u00f5\u0001\u0000\u0000"+ + "\u0000\u03ea\u03eb\u0005o\u0000\u0000\u03eb\u03ec\u0005n\u0000\u0000\u03ec"+ + "\u03ed\u0001\u0000\u0000\u0000\u03ed\u03ee\u0006u\u0016\u0000\u03ee\u00f7"+ + "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005w\u0000\u0000\u03f0\u03f1\u0005"+ + "i\u0000\u0000\u03f1\u03f2\u0005t\u0000\u0000\u03f2\u03f3\u0005h\u0000"+ + "\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006v\u0016\u0000"+ + "\u03f5\u00f9\u0001\u0000\u0000\u0000\u03f6\u03f7\b\f\u0000\u0000\u03f7"+ + "\u00fb\u0001\u0000\u0000\u0000\u03f8\u03fa\u0003\u00faw\u0000\u03f9\u03f8"+ + "\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03f9"+ + "\u0001\u0000\u0000\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd"+ + "\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003\u0140\u009a\u0000\u03fe\u0400"+ + "\u0001\u0000\u0000\u0000\u03ff\u03f9\u0001\u0000\u0000\u0000\u03ff\u0400"+ + "\u0001\u0000\u0000\u0000\u0400\u0402\u0001\u0000\u0000\u0000\u0401\u0403"+ + "\u0003\u00faw\u0000\u0402\u0401\u0001\u0000\u0000\u0000\u0403\u0404\u0001"+ + "\u0000\u0000\u0000\u0404\u0402\u0001\u0000\u0000\u0000\u0404\u0405\u0001"+ + "\u0000\u0000\u0000\u0405\u00fd\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ + "\u00aaO\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006y\u0012"+ + "\u0000\u0409\u00ff\u0001\u0000\u0000\u0000\u040a\u040b\u0003\u00fcx\u0000"+ + "\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006z\u0017\u0000\u040d"+ + "\u0101\u0001\u0000\u0000\u0000\u040e\u040f\u00032\u0013\u0000\u040f\u0410"+ + "\u0001\u0000\u0000\u0000\u0410\u0411\u0006{\t\u0000\u0411\u0103\u0001"+ + "\u0000\u0000\u0000\u0412\u0413\u00034\u0014\u0000\u0413\u0414\u0001\u0000"+ + 
"\u0000\u0000\u0414\u0415\u0006|\t\u0000\u0415\u0105\u0001\u0000\u0000"+ + "\u0000\u0416\u0417\u00036\u0015\u0000\u0417\u0418\u0001\u0000\u0000\u0000"+ + "\u0418\u0419\u0006}\t\u0000\u0419\u0107\u0001\u0000\u0000\u0000\u041a"+ + "\u041b\u0003B\u001b\u0000\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d"+ + "\u0006~\f\u0000\u041d\u041e\u0006~\r\u0000\u041e\u041f\u0006~\r\u0000"+ + "\u041f\u0109\u0001\u0000\u0000\u0000\u0420\u0421\u0003d,\u0000\u0421\u0422"+ + "\u0001\u0000\u0000\u0000\u0422\u0423\u0006\u007f\u0010\u0000\u0423\u010b"+ + "\u0001\u0000\u0000\u0000\u0424\u0425\u0003f-\u0000\u0425\u0426\u0001\u0000"+ + "\u0000\u0000\u0426\u0427\u0006\u0080\u000f\u0000\u0427\u010d\u0001\u0000"+ + "\u0000\u0000\u0428\u0429\u0003j/\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ + "\u042a\u042b\u0006\u0081\u0013\u0000\u042b\u010f\u0001\u0000\u0000\u0000"+ + "\u042c\u042d\u0003\u00f8v\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e"+ + "\u042f\u0006\u0082\u0018\u0000\u042f\u0111\u0001\u0000\u0000\u0000\u0430"+ + "\u0431\u0003\u00d8f\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0433"+ + "\u0006\u0083\u0014\u0000\u0433\u0113\u0001\u0000\u0000\u0000\u0434\u0435"+ + "\u0003\u00aaO\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437\u0006"+ + "\u0084\u0012\u0000\u0437\u0115\u0001\u0000\u0000\u0000\u0438\u0439\u0003"+ + "2\u0013\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006\u0085"+ + "\t\u0000\u043b\u0117\u0001\u0000\u0000\u0000\u043c\u043d\u00034\u0014"+ + "\u0000\u043d\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006\u0086\t\u0000"+ + "\u043f\u0119\u0001\u0000\u0000\u0000\u0440\u0441\u00036\u0015\u0000\u0441"+ + "\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006\u0087\t\u0000\u0443\u011b"+ + "\u0001\u0000\u0000\u0000\u0444\u0445\u0003B\u001b\u0000\u0445\u0446\u0001"+ + "\u0000\u0000\u0000\u0446\u0447\u0006\u0088\f\u0000\u0447\u0448\u0006\u0088"+ + "\r\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003j/\u0000"+ + 
"\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089\u0013\u0000"+ + "\u044c\u011f\u0001\u0000\u0000\u0000\u044d\u044e\u0003\u00aaO\u0000\u044e"+ + "\u044f\u0001\u0000\u0000\u0000\u044f\u0450\u0006\u008a\u0012\u0000\u0450"+ + "\u0121\u0001\u0000\u0000\u0000\u0451\u0452\u0003\u00a6M\u0000\u0452\u0453"+ + "\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u008b\u0019\u0000\u0454\u0123"+ + "\u0001\u0000\u0000\u0000\u0455\u0456\u00032\u0013\u0000\u0456\u0457\u0001"+ + "\u0000\u0000\u0000\u0457\u0458\u0006\u008c\t\u0000\u0458\u0125\u0001\u0000"+ + "\u0000\u0000\u0459\u045a\u00034\u0014\u0000\u045a\u045b\u0001\u0000\u0000"+ + "\u0000\u045b\u045c\u0006\u008d\t\u0000\u045c\u0127\u0001\u0000\u0000\u0000"+ + "\u045d\u045e\u00036\u0015\u0000\u045e\u045f\u0001\u0000\u0000\u0000\u045f"+ + "\u0460\u0006\u008e\t\u0000\u0460\u0129\u0001\u0000\u0000\u0000\u0461\u0462"+ + "\u0003B\u001b\u0000\u0462\u0463\u0001\u0000\u0000\u0000\u0463\u0464\u0006"+ + "\u008f\f\u0000\u0464\u0465\u0006\u008f\r\u0000\u0465\u012b\u0001\u0000"+ + "\u0000\u0000\u0466\u0467\u0005i\u0000\u0000\u0467\u0468\u0005n\u0000\u0000"+ + "\u0468\u0469\u0005f\u0000\u0000\u0469\u046a\u0005o\u0000\u0000\u046a\u012d"+ + "\u0001\u0000\u0000\u0000\u046b\u046c\u00032\u0013\u0000\u046c\u046d\u0001"+ + "\u0000\u0000\u0000\u046d\u046e\u0006\u0091\t\u0000\u046e\u012f\u0001\u0000"+ + "\u0000\u0000\u046f\u0470\u00034\u0014\u0000\u0470\u0471\u0001\u0000\u0000"+ + "\u0000\u0471\u0472\u0006\u0092\t\u0000\u0472\u0131\u0001\u0000\u0000\u0000"+ + "\u0473\u0474\u00036\u0015\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475"+ + "\u0476\u0006\u0093\t\u0000\u0476\u0133\u0001\u0000\u0000\u0000\u0477\u0478"+ + "\u0003B\u001b\u0000\u0478\u0479\u0001\u0000\u0000\u0000\u0479\u047a\u0006"+ + "\u0094\f\u0000\u047a\u047b\u0006\u0094\r\u0000\u047b\u0135\u0001\u0000"+ + "\u0000\u0000\u047c\u047d\u0005f\u0000\u0000\u047d\u047e\u0005u\u0000\u0000"+ + "\u047e\u047f\u0005n\u0000\u0000\u047f\u0480\u0005c\u0000\u0000\u0480\u0481"+ + 
"\u0005t\u0000\u0000\u0481\u0482\u0005i\u0000\u0000\u0482\u0483\u0005o"+ + "\u0000\u0000\u0483\u0484\u0005n\u0000\u0000\u0484\u0485\u0005s\u0000\u0000"+ + "\u0485\u0137\u0001\u0000\u0000\u0000\u0486\u0487\u00032\u0013\u0000\u0487"+ + "\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u0096\t\u0000\u0489\u0139"+ + "\u0001\u0000\u0000\u0000\u048a\u048b\u00034\u0014\u0000\u048b\u048c\u0001"+ + "\u0000\u0000\u0000\u048c\u048d\u0006\u0097\t\u0000\u048d\u013b\u0001\u0000"+ + "\u0000\u0000\u048e\u048f\u00036\u0015\u0000\u048f\u0490\u0001\u0000\u0000"+ + "\u0000\u0490\u0491\u0006\u0098\t\u0000\u0491\u013d\u0001\u0000\u0000\u0000"+ + "\u0492\u0493\u0003\u00a4L\u0000\u0493\u0494\u0001\u0000\u0000\u0000\u0494"+ + "\u0495\u0006\u0099\u000e\u0000\u0495\u0496\u0006\u0099\r\u0000\u0496\u013f"+ + "\u0001\u0000\u0000\u0000\u0497\u0498\u0005:\u0000\u0000\u0498\u0141\u0001"+ + "\u0000\u0000\u0000\u0499\u049f\u0003N!\u0000\u049a\u049f\u0003D\u001c"+ + "\u0000\u049b\u049f\u0003j/\u0000\u049c\u049f\u0003F\u001d\u0000\u049d"+ + "\u049f\u0003T$\u0000\u049e\u0499\u0001\u0000\u0000\u0000\u049e\u049a\u0001"+ + "\u0000\u0000\u0000\u049e\u049b\u0001\u0000\u0000\u0000\u049e\u049c\u0001"+ + "\u0000\u0000\u0000\u049e\u049d\u0001\u0000\u0000\u0000\u049f\u04a0\u0001"+ + "\u0000\u0000\u0000\u04a0\u049e\u0001\u0000\u0000\u0000\u04a0\u04a1\u0001"+ + "\u0000\u0000\u0000\u04a1\u0143\u0001\u0000\u0000\u0000\u04a2\u04a3\u0003"+ + "2\u0013\u0000\u04a3\u04a4\u0001\u0000\u0000\u0000\u04a4\u04a5\u0006\u009c"+ + "\t\u0000\u04a5\u0145\u0001\u0000\u0000\u0000\u04a6\u04a7\u00034\u0014"+ + "\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006\u009d\t\u0000"+ + "\u04a9\u0147\u0001\u0000\u0000\u0000\u04aa\u04ab\u00036\u0015\u0000\u04ab"+ + "\u04ac\u0001\u0000\u0000\u0000\u04ac\u04ad\u0006\u009e\t\u0000\u04ad\u0149"+ + "\u0001\u0000\u0000\u0000:\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4\u01fd\u01ff\u020a\u0233\u0238\u0241"+ + 
"\u0248\u024d\u024f\u025a\u0262\u0265\u0267\u026c\u0271\u0277\u027e\u0283"+ + "\u0289\u028c\u0294\u0298\u031a\u031f\u0324\u0326\u032c\u036b\u0370\u0393"+ + "\u0397\u039c\u03a1\u03a6\u03a8\u03ac\u03ae\u03fb\u03ff\u0404\u049e\u04a0"+ + "\u001a\u0005\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001"+ + "\u0000\u0005\u0003\u0000\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + "\u0005\t\u0000\u0000\u0001\u0000\u0007@\u0000\u0005\u0000\u0000\u0007"+ + "\u001a\u0000\u0004\u0000\u0000\u0007A\u0000\u0007\"\u0000\u0007!\u0000"+ + "\u0007\u001b\u0000\u0007C\u0000\u0007$\u0000\u0007M\u0000\u0005\u000b"+ + "\u0000\u0005\u0007\u0000\u0007W\u0000\u0007V\u0000\u0007B\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b8c5f609e75e5..ff5de4e348db0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -70,6 +70,7 @@ null null null null +'options' 'metadata' null null @@ -137,7 +138,7 @@ EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT PIPE -STRING +QUOTED_STRING INTEGER_LITERAL DECIMAL_LITERAL BY @@ -181,6 +182,7 @@ QUOTED_IDENTIFIER EXPR_LINE_COMMENT EXPR_MULTILINE_COMMENT EXPR_WS +OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT @@ -236,13 +238,15 @@ rowCommand fields field fromCommand +fromIdentifier +fromOptions +configOption metadata metadataOption deprecated_metadata evalCommand statsCommand inlinestatsCommand -fromIdentifier qualifiedName qualifiedNamePattern identifier @@ -275,4 +279,4 @@ enrichWithClause atn: -[4, 1, 108, 510, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 
9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 112, 8, 1, 10, 1, 12, 1, 115, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 122, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 137, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 149, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 156, 8, 5, 10, 5, 12, 5, 159, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 166, 8, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 178, 8, 5, 10, 5, 12, 5, 181, 9, 5, 1, 6, 1, 6, 3, 6, 185, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 197, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 204, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 210, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 218, 8, 8, 10, 8, 12, 8, 221, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 230, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 238, 8, 10, 10, 10, 12, 10, 241, 9, 10, 3, 10, 243, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 253, 8, 12, 10, 12, 12, 12, 256, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 263, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 269, 8, 14, 10, 14, 12, 14, 272, 9, 14, 1, 14, 3, 14, 275, 8, 14, 1, 15, 1, 15, 3, 15, 279, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 285, 8, 16, 10, 16, 12, 16, 288, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 
19, 299, 8, 19, 1, 19, 1, 19, 3, 19, 303, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 309, 8, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 5, 22, 316, 8, 22, 10, 22, 12, 22, 319, 9, 22, 1, 23, 1, 23, 1, 23, 5, 23, 324, 8, 23, 10, 23, 12, 23, 327, 9, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 346, 8, 26, 10, 26, 12, 26, 349, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 384, 8, 28, 10, 28, 12, 28, 387, 9, 28, 1, 29, 1, 29, 3, 29, 391, 8, 29, 1, 29, 1, 29, 3, 29, 395, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 410, 8, 31, 10, 31, 12, 31, 413, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 419, 8, 32, 10, 32, 12, 32, 422, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 432, 8, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 5, 37, 444, 8, 37, 10, 37, 12, 37, 447, 9, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 3, 40, 457, 8, 40, 1, 41, 3, 41, 460, 8, 41, 1, 41, 1, 41, 1, 42, 3, 42, 465, 8, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 3, 49, 490, 8, 49, 1, 49, 1, 49, 1, 49, 1, 49, 5, 49, 496, 8, 49, 10, 49, 12, 49, 499, 9, 49, 3, 49, 501, 8, 49, 1, 50, 1, 50, 1, 50, 3, 50, 506, 8, 50, 1, 50, 1, 50, 1, 50, 0, 3, 2, 10, 16, 51, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 72, 72, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 
38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 535, 0, 102, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 4, 121, 1, 0, 0, 0, 6, 136, 1, 0, 0, 0, 8, 138, 1, 0, 0, 0, 10, 169, 1, 0, 0, 0, 12, 196, 1, 0, 0, 0, 14, 203, 1, 0, 0, 0, 16, 209, 1, 0, 0, 0, 18, 229, 1, 0, 0, 0, 20, 231, 1, 0, 0, 0, 22, 246, 1, 0, 0, 0, 24, 249, 1, 0, 0, 0, 26, 262, 1, 0, 0, 0, 28, 264, 1, 0, 0, 0, 30, 278, 1, 0, 0, 0, 32, 280, 1, 0, 0, 0, 34, 289, 1, 0, 0, 0, 36, 293, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 312, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 330, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 379, 1, 0, 0, 0, 58, 388, 1, 0, 0, 0, 60, 396, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 414, 1, 0, 0, 0, 66, 423, 1, 0, 0, 0, 68, 427, 1, 0, 0, 0, 70, 433, 1, 0, 0, 0, 72, 437, 1, 0, 0, 0, 74, 440, 1, 0, 0, 0, 76, 448, 1, 0, 0, 0, 78, 452, 1, 0, 0, 0, 80, 456, 1, 0, 0, 0, 82, 459, 1, 0, 0, 0, 84, 464, 1, 0, 0, 0, 86, 468, 1, 0, 0, 0, 88, 470, 1, 0, 0, 0, 90, 472, 1, 0, 0, 0, 92, 475, 1, 0, 0, 0, 94, 479, 1, 0, 0, 0, 96, 482, 1, 0, 0, 0, 98, 485, 1, 0, 0, 0, 100, 505, 1, 0, 0, 0, 102, 103, 3, 2, 1, 0, 103, 104, 5, 0, 0, 1, 104, 1, 1, 0, 0, 0, 105, 106, 6, 1, -1, 0, 106, 107, 3, 4, 2, 0, 107, 113, 1, 0, 0, 0, 108, 109, 10, 1, 0, 0, 109, 110, 5, 26, 0, 0, 110, 112, 3, 6, 3, 0, 111, 108, 1, 0, 0, 0, 112, 115, 1, 0, 0, 0, 113, 111, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 3, 1, 0, 0, 0, 115, 113, 1, 0, 0, 0, 116, 122, 3, 90, 45, 0, 117, 122, 3, 28, 14, 0, 118, 122, 3, 22, 11, 0, 119, 122, 3, 94, 47, 0, 120, 122, 3, 96, 48, 0, 121, 116, 1, 0, 0, 0, 121, 117, 1, 0, 0, 0, 121, 118, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 120, 1, 0, 0, 0, 122, 5, 1, 0, 0, 0, 123, 137, 3, 36, 18, 0, 124, 137, 3, 40, 20, 0, 125, 137, 3, 54, 27, 0, 126, 137, 3, 60, 30, 0, 127, 137, 3, 56, 28, 0, 128, 137, 3, 38, 19, 0, 129, 137, 3, 8, 4, 0, 130, 137, 3, 62, 31, 0, 131, 137, 3, 64, 32, 0, 132, 137, 3, 68, 34, 0, 133, 137, 3, 70, 35, 0, 134, 137, 3, 98, 49, 0, 135, 137, 
3, 72, 36, 0, 136, 123, 1, 0, 0, 0, 136, 124, 1, 0, 0, 0, 136, 125, 1, 0, 0, 0, 136, 126, 1, 0, 0, 0, 136, 127, 1, 0, 0, 0, 136, 128, 1, 0, 0, 0, 136, 129, 1, 0, 0, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 136, 135, 1, 0, 0, 0, 137, 7, 1, 0, 0, 0, 138, 139, 5, 18, 0, 0, 139, 140, 3, 10, 5, 0, 140, 9, 1, 0, 0, 0, 141, 142, 6, 5, -1, 0, 142, 143, 5, 44, 0, 0, 143, 170, 3, 10, 5, 7, 144, 170, 3, 14, 7, 0, 145, 170, 3, 12, 6, 0, 146, 148, 3, 14, 7, 0, 147, 149, 5, 44, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 5, 41, 0, 0, 151, 152, 5, 40, 0, 0, 152, 157, 3, 14, 7, 0, 153, 154, 5, 34, 0, 0, 154, 156, 3, 14, 7, 0, 155, 153, 1, 0, 0, 0, 156, 159, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 160, 1, 0, 0, 0, 159, 157, 1, 0, 0, 0, 160, 161, 5, 50, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 3, 14, 7, 0, 163, 165, 5, 42, 0, 0, 164, 166, 5, 44, 0, 0, 165, 164, 1, 0, 0, 0, 165, 166, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 168, 5, 45, 0, 0, 168, 170, 1, 0, 0, 0, 169, 141, 1, 0, 0, 0, 169, 144, 1, 0, 0, 0, 169, 145, 1, 0, 0, 0, 169, 146, 1, 0, 0, 0, 169, 162, 1, 0, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 10, 4, 0, 0, 172, 173, 5, 31, 0, 0, 173, 178, 3, 10, 5, 5, 174, 175, 10, 3, 0, 0, 175, 176, 5, 47, 0, 0, 176, 178, 3, 10, 5, 4, 177, 171, 1, 0, 0, 0, 177, 174, 1, 0, 0, 0, 178, 181, 1, 0, 0, 0, 179, 177, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 11, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 182, 184, 3, 14, 7, 0, 183, 185, 5, 44, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 43, 0, 0, 187, 188, 3, 86, 43, 0, 188, 197, 1, 0, 0, 0, 189, 191, 3, 14, 7, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 49, 0, 0, 194, 195, 3, 86, 43, 0, 195, 197, 1, 0, 0, 0, 196, 182, 1, 0, 0, 0, 196, 189, 1, 0, 0, 0, 197, 13, 1, 0, 0, 0, 198, 204, 3, 16, 8, 0, 199, 200, 3, 16, 8, 0, 200, 201, 3, 88, 
44, 0, 201, 202, 3, 16, 8, 0, 202, 204, 1, 0, 0, 0, 203, 198, 1, 0, 0, 0, 203, 199, 1, 0, 0, 0, 204, 15, 1, 0, 0, 0, 205, 206, 6, 8, -1, 0, 206, 210, 3, 18, 9, 0, 207, 208, 7, 0, 0, 0, 208, 210, 3, 16, 8, 3, 209, 205, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 219, 1, 0, 0, 0, 211, 212, 10, 2, 0, 0, 212, 213, 7, 1, 0, 0, 213, 218, 3, 16, 8, 3, 214, 215, 10, 1, 0, 0, 215, 216, 7, 0, 0, 0, 216, 218, 3, 16, 8, 2, 217, 211, 1, 0, 0, 0, 217, 214, 1, 0, 0, 0, 218, 221, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 17, 1, 0, 0, 0, 221, 219, 1, 0, 0, 0, 222, 230, 3, 52, 26, 0, 223, 230, 3, 44, 22, 0, 224, 230, 3, 20, 10, 0, 225, 226, 5, 40, 0, 0, 226, 227, 3, 10, 5, 0, 227, 228, 5, 50, 0, 0, 228, 230, 1, 0, 0, 0, 229, 222, 1, 0, 0, 0, 229, 223, 1, 0, 0, 0, 229, 224, 1, 0, 0, 0, 229, 225, 1, 0, 0, 0, 230, 19, 1, 0, 0, 0, 231, 232, 3, 48, 24, 0, 232, 242, 5, 40, 0, 0, 233, 243, 5, 61, 0, 0, 234, 239, 3, 10, 5, 0, 235, 236, 5, 34, 0, 0, 236, 238, 3, 10, 5, 0, 237, 235, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 243, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 233, 1, 0, 0, 0, 242, 234, 1, 0, 0, 0, 242, 243, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 245, 5, 50, 0, 0, 245, 21, 1, 0, 0, 0, 246, 247, 5, 14, 0, 0, 247, 248, 3, 24, 12, 0, 248, 23, 1, 0, 0, 0, 249, 254, 3, 26, 13, 0, 250, 251, 5, 34, 0, 0, 251, 253, 3, 26, 13, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 25, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 263, 3, 10, 5, 0, 258, 259, 3, 44, 22, 0, 259, 260, 5, 33, 0, 0, 260, 261, 3, 10, 5, 0, 261, 263, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 27, 1, 0, 0, 0, 264, 265, 5, 6, 0, 0, 265, 270, 3, 42, 21, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 42, 21, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 3, 30, 15, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 29, 1, 0, 0, 
0, 276, 279, 3, 32, 16, 0, 277, 279, 3, 34, 17, 0, 278, 276, 1, 0, 0, 0, 278, 277, 1, 0, 0, 0, 279, 31, 1, 0, 0, 0, 280, 281, 5, 71, 0, 0, 281, 286, 3, 42, 21, 0, 282, 283, 5, 34, 0, 0, 283, 285, 3, 42, 21, 0, 284, 282, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 33, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 64, 0, 0, 290, 291, 3, 32, 16, 0, 291, 292, 5, 65, 0, 0, 292, 35, 1, 0, 0, 0, 293, 294, 5, 4, 0, 0, 294, 295, 3, 24, 12, 0, 295, 37, 1, 0, 0, 0, 296, 298, 5, 17, 0, 0, 297, 299, 3, 24, 12, 0, 298, 297, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 301, 5, 30, 0, 0, 301, 303, 3, 24, 12, 0, 302, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 39, 1, 0, 0, 0, 304, 305, 5, 8, 0, 0, 305, 308, 3, 24, 12, 0, 306, 307, 5, 30, 0, 0, 307, 309, 3, 24, 12, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 41, 1, 0, 0, 0, 310, 311, 7, 2, 0, 0, 311, 43, 1, 0, 0, 0, 312, 317, 3, 48, 24, 0, 313, 314, 5, 36, 0, 0, 314, 316, 3, 48, 24, 0, 315, 313, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 45, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 325, 3, 50, 25, 0, 321, 322, 5, 36, 0, 0, 322, 324, 3, 50, 25, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 47, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 329, 7, 3, 0, 0, 329, 49, 1, 0, 0, 0, 330, 331, 5, 76, 0, 0, 331, 51, 1, 0, 0, 0, 332, 375, 5, 45, 0, 0, 333, 334, 3, 84, 42, 0, 334, 335, 5, 66, 0, 0, 335, 375, 1, 0, 0, 0, 336, 375, 3, 82, 41, 0, 337, 375, 3, 84, 42, 0, 338, 375, 3, 78, 39, 0, 339, 375, 5, 48, 0, 0, 340, 375, 3, 86, 43, 0, 341, 342, 5, 64, 0, 0, 342, 347, 3, 80, 40, 0, 343, 344, 5, 34, 0, 0, 344, 346, 3, 80, 40, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 350, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 65, 0, 0, 351, 375, 1, 0, 0, 0, 352, 353, 5, 64, 0, 0, 353, 358, 3, 78, 39, 0, 354, 355, 5, 34, 0, 0, 355, 357, 3, 78, 39, 0, 356, 
354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 361, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 5, 65, 0, 0, 362, 375, 1, 0, 0, 0, 363, 364, 5, 64, 0, 0, 364, 369, 3, 86, 43, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 86, 43, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 372, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 373, 5, 65, 0, 0, 373, 375, 1, 0, 0, 0, 374, 332, 1, 0, 0, 0, 374, 333, 1, 0, 0, 0, 374, 336, 1, 0, 0, 0, 374, 337, 1, 0, 0, 0, 374, 338, 1, 0, 0, 0, 374, 339, 1, 0, 0, 0, 374, 340, 1, 0, 0, 0, 374, 341, 1, 0, 0, 0, 374, 352, 1, 0, 0, 0, 374, 363, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 377, 5, 10, 0, 0, 377, 378, 5, 28, 0, 0, 378, 55, 1, 0, 0, 0, 379, 380, 5, 16, 0, 0, 380, 385, 3, 58, 29, 0, 381, 382, 5, 34, 0, 0, 382, 384, 3, 58, 29, 0, 383, 381, 1, 0, 0, 0, 384, 387, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 57, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 388, 390, 3, 10, 5, 0, 389, 391, 7, 4, 0, 0, 390, 389, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 393, 5, 46, 0, 0, 393, 395, 7, 5, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 397, 5, 9, 0, 0, 397, 402, 3, 46, 23, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 46, 23, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 2, 0, 0, 406, 411, 3, 46, 23, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 46, 23, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 63, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 5, 13, 0, 0, 415, 420, 3, 66, 33, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 66, 33, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 65, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 3, 46, 23, 0, 424, 425, 5, 80, 0, 0, 425, 426, 3, 46, 23, 0, 426, 67, 1, 0, 0, 0, 427, 428, 5, 1, 0, 0, 428, 429, 3, 
18, 9, 0, 429, 431, 3, 86, 43, 0, 430, 432, 3, 74, 37, 0, 431, 430, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 69, 1, 0, 0, 0, 433, 434, 5, 7, 0, 0, 434, 435, 3, 18, 9, 0, 435, 436, 3, 86, 43, 0, 436, 71, 1, 0, 0, 0, 437, 438, 5, 12, 0, 0, 438, 439, 3, 44, 22, 0, 439, 73, 1, 0, 0, 0, 440, 445, 3, 76, 38, 0, 441, 442, 5, 34, 0, 0, 442, 444, 3, 76, 38, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 75, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 3, 48, 24, 0, 449, 450, 5, 33, 0, 0, 450, 451, 3, 52, 26, 0, 451, 77, 1, 0, 0, 0, 452, 453, 7, 6, 0, 0, 453, 79, 1, 0, 0, 0, 454, 457, 3, 82, 41, 0, 455, 457, 3, 84, 42, 0, 456, 454, 1, 0, 0, 0, 456, 455, 1, 0, 0, 0, 457, 81, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 29, 0, 0, 462, 83, 1, 0, 0, 0, 463, 465, 7, 0, 0, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 5, 28, 0, 0, 467, 85, 1, 0, 0, 0, 468, 469, 5, 27, 0, 0, 469, 87, 1, 0, 0, 0, 470, 471, 7, 7, 0, 0, 471, 89, 1, 0, 0, 0, 472, 473, 5, 5, 0, 0, 473, 474, 3, 92, 46, 0, 474, 91, 1, 0, 0, 0, 475, 476, 5, 64, 0, 0, 476, 477, 3, 2, 1, 0, 477, 478, 5, 65, 0, 0, 478, 93, 1, 0, 0, 0, 479, 480, 5, 15, 0, 0, 480, 481, 5, 96, 0, 0, 481, 95, 1, 0, 0, 0, 482, 483, 5, 11, 0, 0, 483, 484, 5, 100, 0, 0, 484, 97, 1, 0, 0, 0, 485, 486, 5, 3, 0, 0, 486, 489, 5, 86, 0, 0, 487, 488, 5, 84, 0, 0, 488, 490, 3, 46, 23, 0, 489, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 500, 1, 0, 0, 0, 491, 492, 5, 85, 0, 0, 492, 497, 3, 100, 50, 0, 493, 494, 5, 34, 0, 0, 494, 496, 3, 100, 50, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 491, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 3, 46, 23, 0, 503, 504, 5, 33, 0, 0, 504, 506, 1, 0, 0, 0, 505, 502, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 508, 3, 46, 23, 0, 508, 101, 1, 0, 
0, 0, 49, 113, 121, 136, 148, 157, 165, 169, 177, 179, 184, 191, 196, 203, 209, 217, 219, 229, 239, 242, 254, 262, 270, 274, 278, 286, 298, 302, 308, 317, 325, 347, 358, 369, 374, 385, 390, 394, 402, 411, 420, 431, 445, 456, 459, 464, 489, 497, 500, 505] \ No newline at end of file +[4, 1, 109, 530, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 116, 8, 1, 10, 1, 12, 1, 119, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 126, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 141, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 153, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 160, 8, 5, 10, 5, 12, 5, 163, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 3, 5, 174, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 6, 1, 6, 3, 6, 189, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 196, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 208, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 214, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 222, 8, 8, 10, 8, 12, 8, 225, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 234, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 242, 8, 10, 10, 10, 12, 10, 245, 9, 10, 3, 10, 
247, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 257, 8, 12, 10, 12, 12, 12, 260, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 267, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 273, 8, 14, 10, 14, 12, 14, 276, 9, 14, 1, 14, 3, 14, 279, 8, 14, 1, 14, 3, 14, 282, 8, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 290, 8, 16, 10, 16, 12, 16, 293, 9, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 3, 18, 301, 8, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 307, 8, 19, 10, 19, 12, 19, 310, 9, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 3, 22, 321, 8, 22, 1, 22, 1, 22, 3, 22, 325, 8, 22, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 331, 8, 23, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 366, 8, 28, 10, 28, 12, 28, 369, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 377, 8, 28, 10, 28, 12, 28, 380, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 388, 8, 28, 10, 28, 12, 28, 391, 9, 28, 1, 28, 1, 28, 3, 28, 395, 8, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 404, 8, 30, 10, 30, 12, 30, 407, 9, 30, 1, 31, 1, 31, 3, 31, 411, 8, 31, 1, 31, 1, 31, 3, 31, 415, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 421, 8, 32, 10, 32, 12, 32, 424, 9, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 430, 8, 33, 10, 33, 12, 33, 433, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 452, 8, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 5, 39, 464, 8, 39, 10, 39, 12, 39, 467, 9, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 3, 42, 477, 8, 42, 1, 43, 3, 43, 480, 8, 43, 1, 43, 1, 43, 1, 44, 3, 44, 485, 8, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 
1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 3, 51, 510, 8, 51, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 516, 8, 51, 10, 51, 12, 51, 519, 9, 51, 3, 51, 521, 8, 51, 1, 52, 1, 52, 1, 52, 3, 52, 526, 8, 52, 1, 52, 1, 52, 1, 52, 0, 3, 2, 10, 16, 53, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 73, 73, 1, 0, 66, 67, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 555, 0, 106, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 4, 125, 1, 0, 0, 0, 6, 140, 1, 0, 0, 0, 8, 142, 1, 0, 0, 0, 10, 173, 1, 0, 0, 0, 12, 200, 1, 0, 0, 0, 14, 207, 1, 0, 0, 0, 16, 213, 1, 0, 0, 0, 18, 233, 1, 0, 0, 0, 20, 235, 1, 0, 0, 0, 22, 250, 1, 0, 0, 0, 24, 253, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 268, 1, 0, 0, 0, 30, 283, 1, 0, 0, 0, 32, 285, 1, 0, 0, 0, 34, 294, 1, 0, 0, 0, 36, 300, 1, 0, 0, 0, 38, 302, 1, 0, 0, 0, 40, 311, 1, 0, 0, 0, 42, 315, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 332, 1, 0, 0, 0, 50, 340, 1, 0, 0, 0, 52, 348, 1, 0, 0, 0, 54, 350, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 396, 1, 0, 0, 0, 60, 399, 1, 0, 0, 0, 62, 408, 1, 0, 0, 0, 64, 416, 1, 0, 0, 0, 66, 425, 1, 0, 0, 0, 68, 434, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 472, 1, 0, 0, 0, 84, 476, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 484, 1, 0, 0, 0, 90, 488, 1, 0, 0, 0, 92, 490, 1, 0, 0, 0, 94, 492, 1, 0, 0, 0, 96, 495, 1, 0, 0, 0, 98, 499, 1, 0, 0, 0, 100, 502, 1, 0, 0, 0, 102, 505, 1, 0, 0, 0, 104, 525, 1, 0, 0, 0, 106, 107, 3, 2, 1, 0, 107, 108, 5, 0, 0, 1, 108, 1, 1, 0, 0, 0, 109, 110, 6, 1, -1, 0, 110, 111, 3, 4, 2, 0, 111, 117, 1, 0, 0, 0, 112, 113, 10, 1, 0, 0, 113, 114, 5, 26, 0, 0, 114, 116, 3, 6, 3, 0, 115, 112, 1, 0, 0, 0, 116, 119, 1, 0, 0, 0, 117, 
115, 1, 0, 0, 0, 117, 118, 1, 0, 0, 0, 118, 3, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 120, 126, 3, 94, 47, 0, 121, 126, 3, 28, 14, 0, 122, 126, 3, 22, 11, 0, 123, 126, 3, 98, 49, 0, 124, 126, 3, 100, 50, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 5, 1, 0, 0, 0, 127, 141, 3, 42, 21, 0, 128, 141, 3, 46, 23, 0, 129, 141, 3, 58, 29, 0, 130, 141, 3, 64, 32, 0, 131, 141, 3, 60, 30, 0, 132, 141, 3, 44, 22, 0, 133, 141, 3, 8, 4, 0, 134, 141, 3, 66, 33, 0, 135, 141, 3, 68, 34, 0, 136, 141, 3, 72, 36, 0, 137, 141, 3, 74, 37, 0, 138, 141, 3, 102, 51, 0, 139, 141, 3, 76, 38, 0, 140, 127, 1, 0, 0, 0, 140, 128, 1, 0, 0, 0, 140, 129, 1, 0, 0, 0, 140, 130, 1, 0, 0, 0, 140, 131, 1, 0, 0, 0, 140, 132, 1, 0, 0, 0, 140, 133, 1, 0, 0, 0, 140, 134, 1, 0, 0, 0, 140, 135, 1, 0, 0, 0, 140, 136, 1, 0, 0, 0, 140, 137, 1, 0, 0, 0, 140, 138, 1, 0, 0, 0, 140, 139, 1, 0, 0, 0, 141, 7, 1, 0, 0, 0, 142, 143, 5, 18, 0, 0, 143, 144, 3, 10, 5, 0, 144, 9, 1, 0, 0, 0, 145, 146, 6, 5, -1, 0, 146, 147, 5, 44, 0, 0, 147, 174, 3, 10, 5, 7, 148, 174, 3, 14, 7, 0, 149, 174, 3, 12, 6, 0, 150, 152, 3, 14, 7, 0, 151, 153, 5, 44, 0, 0, 152, 151, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 154, 1, 0, 0, 0, 154, 155, 5, 41, 0, 0, 155, 156, 5, 40, 0, 0, 156, 161, 3, 14, 7, 0, 157, 158, 5, 34, 0, 0, 158, 160, 3, 14, 7, 0, 159, 157, 1, 0, 0, 0, 160, 163, 1, 0, 0, 0, 161, 159, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 164, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 165, 5, 50, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 3, 14, 7, 0, 167, 169, 5, 42, 0, 0, 168, 170, 5, 44, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 5, 45, 0, 0, 172, 174, 1, 0, 0, 0, 173, 145, 1, 0, 0, 0, 173, 148, 1, 0, 0, 0, 173, 149, 1, 0, 0, 0, 173, 150, 1, 0, 0, 0, 173, 166, 1, 0, 0, 0, 174, 183, 1, 0, 0, 0, 175, 176, 10, 4, 0, 0, 176, 177, 5, 31, 0, 0, 177, 182, 3, 10, 5, 5, 178, 179, 10, 3, 0, 0, 179, 180, 5, 47, 0, 0, 180, 182, 3, 10, 5, 4, 181, 175, 1, 0, 
0, 0, 181, 178, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 11, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 188, 3, 14, 7, 0, 187, 189, 5, 44, 0, 0, 188, 187, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 191, 5, 43, 0, 0, 191, 192, 3, 90, 45, 0, 192, 201, 1, 0, 0, 0, 193, 195, 3, 14, 7, 0, 194, 196, 5, 44, 0, 0, 195, 194, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 198, 5, 49, 0, 0, 198, 199, 3, 90, 45, 0, 199, 201, 1, 0, 0, 0, 200, 186, 1, 0, 0, 0, 200, 193, 1, 0, 0, 0, 201, 13, 1, 0, 0, 0, 202, 208, 3, 16, 8, 0, 203, 204, 3, 16, 8, 0, 204, 205, 3, 92, 46, 0, 205, 206, 3, 16, 8, 0, 206, 208, 1, 0, 0, 0, 207, 202, 1, 0, 0, 0, 207, 203, 1, 0, 0, 0, 208, 15, 1, 0, 0, 0, 209, 210, 6, 8, -1, 0, 210, 214, 3, 18, 9, 0, 211, 212, 7, 0, 0, 0, 212, 214, 3, 16, 8, 3, 213, 209, 1, 0, 0, 0, 213, 211, 1, 0, 0, 0, 214, 223, 1, 0, 0, 0, 215, 216, 10, 2, 0, 0, 216, 217, 7, 1, 0, 0, 217, 222, 3, 16, 8, 3, 218, 219, 10, 1, 0, 0, 219, 220, 7, 0, 0, 0, 220, 222, 3, 16, 8, 2, 221, 215, 1, 0, 0, 0, 221, 218, 1, 0, 0, 0, 222, 225, 1, 0, 0, 0, 223, 221, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 17, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 226, 234, 3, 56, 28, 0, 227, 234, 3, 48, 24, 0, 228, 234, 3, 20, 10, 0, 229, 230, 5, 40, 0, 0, 230, 231, 3, 10, 5, 0, 231, 232, 5, 50, 0, 0, 232, 234, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 233, 227, 1, 0, 0, 0, 233, 228, 1, 0, 0, 0, 233, 229, 1, 0, 0, 0, 234, 19, 1, 0, 0, 0, 235, 236, 3, 52, 26, 0, 236, 246, 5, 40, 0, 0, 237, 247, 5, 61, 0, 0, 238, 243, 3, 10, 5, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 10, 5, 0, 241, 239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 237, 1, 0, 0, 0, 246, 238, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 5, 50, 0, 0, 249, 21, 1, 0, 0, 0, 250, 251, 5, 14, 0, 0, 251, 252, 3, 24, 12, 0, 252, 23, 1, 0, 0, 0, 253, 258, 3, 26, 13, 0, 254, 255, 5, 34, 0, 0, 255, 257, 3, 26, 
13, 0, 256, 254, 1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 25, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 267, 3, 10, 5, 0, 262, 263, 3, 48, 24, 0, 263, 264, 5, 33, 0, 0, 264, 265, 3, 10, 5, 0, 265, 267, 1, 0, 0, 0, 266, 261, 1, 0, 0, 0, 266, 262, 1, 0, 0, 0, 267, 27, 1, 0, 0, 0, 268, 269, 5, 6, 0, 0, 269, 274, 3, 30, 15, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 30, 15, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 278, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 279, 3, 32, 16, 0, 278, 277, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 282, 3, 36, 18, 0, 281, 280, 1, 0, 0, 0, 281, 282, 1, 0, 0, 0, 282, 29, 1, 0, 0, 0, 283, 284, 7, 2, 0, 0, 284, 31, 1, 0, 0, 0, 285, 286, 5, 71, 0, 0, 286, 291, 3, 34, 17, 0, 287, 288, 5, 34, 0, 0, 288, 290, 3, 34, 17, 0, 289, 287, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 33, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 294, 295, 3, 90, 45, 0, 295, 296, 5, 33, 0, 0, 296, 297, 3, 90, 45, 0, 297, 35, 1, 0, 0, 0, 298, 301, 3, 38, 19, 0, 299, 301, 3, 40, 20, 0, 300, 298, 1, 0, 0, 0, 300, 299, 1, 0, 0, 0, 301, 37, 1, 0, 0, 0, 302, 303, 5, 72, 0, 0, 303, 308, 3, 30, 15, 0, 304, 305, 5, 34, 0, 0, 305, 307, 3, 30, 15, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 39, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 5, 64, 0, 0, 312, 313, 3, 38, 19, 0, 313, 314, 5, 65, 0, 0, 314, 41, 1, 0, 0, 0, 315, 316, 5, 4, 0, 0, 316, 317, 3, 24, 12, 0, 317, 43, 1, 0, 0, 0, 318, 320, 5, 17, 0, 0, 319, 321, 3, 24, 12, 0, 320, 319, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 324, 1, 0, 0, 0, 322, 323, 5, 30, 0, 0, 323, 325, 3, 24, 12, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 45, 1, 0, 0, 0, 326, 327, 5, 8, 0, 0, 327, 330, 3, 24, 12, 0, 328, 329, 5, 30, 0, 0, 329, 331, 3, 24, 12, 0, 330, 328, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 47, 1, 0, 0, 0, 332, 337, 3, 52, 26, 0, 333, 334, 5, 
36, 0, 0, 334, 336, 3, 52, 26, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 49, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 345, 3, 54, 27, 0, 341, 342, 5, 36, 0, 0, 342, 344, 3, 54, 27, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 51, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 7, 3, 0, 0, 349, 53, 1, 0, 0, 0, 350, 351, 5, 77, 0, 0, 351, 55, 1, 0, 0, 0, 352, 395, 5, 45, 0, 0, 353, 354, 3, 88, 44, 0, 354, 355, 5, 66, 0, 0, 355, 395, 1, 0, 0, 0, 356, 395, 3, 86, 43, 0, 357, 395, 3, 88, 44, 0, 358, 395, 3, 82, 41, 0, 359, 395, 5, 48, 0, 0, 360, 395, 3, 90, 45, 0, 361, 362, 5, 64, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 65, 0, 0, 371, 395, 1, 0, 0, 0, 372, 373, 5, 64, 0, 0, 373, 378, 3, 82, 41, 0, 374, 375, 5, 34, 0, 0, 375, 377, 3, 82, 41, 0, 376, 374, 1, 0, 0, 0, 377, 380, 1, 0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 381, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 381, 382, 5, 65, 0, 0, 382, 395, 1, 0, 0, 0, 383, 384, 5, 64, 0, 0, 384, 389, 3, 90, 45, 0, 385, 386, 5, 34, 0, 0, 386, 388, 3, 90, 45, 0, 387, 385, 1, 0, 0, 0, 388, 391, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 389, 390, 1, 0, 0, 0, 390, 392, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 392, 393, 5, 65, 0, 0, 393, 395, 1, 0, 0, 0, 394, 352, 1, 0, 0, 0, 394, 353, 1, 0, 0, 0, 394, 356, 1, 0, 0, 0, 394, 357, 1, 0, 0, 0, 394, 358, 1, 0, 0, 0, 394, 359, 1, 0, 0, 0, 394, 360, 1, 0, 0, 0, 394, 361, 1, 0, 0, 0, 394, 372, 1, 0, 0, 0, 394, 383, 1, 0, 0, 0, 395, 57, 1, 0, 0, 0, 396, 397, 5, 10, 0, 0, 397, 398, 5, 28, 0, 0, 398, 59, 1, 0, 0, 0, 399, 400, 5, 16, 0, 0, 400, 405, 3, 62, 31, 0, 401, 402, 5, 34, 0, 0, 402, 404, 3, 62, 31, 0, 403, 401, 1, 0, 0, 0, 404, 407, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 405, 406, 1, 0, 0, 0, 406, 61, 1, 0, 0, 0, 407, 405, 
1, 0, 0, 0, 408, 410, 3, 10, 5, 0, 409, 411, 7, 4, 0, 0, 410, 409, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 413, 5, 46, 0, 0, 413, 415, 7, 5, 0, 0, 414, 412, 1, 0, 0, 0, 414, 415, 1, 0, 0, 0, 415, 63, 1, 0, 0, 0, 416, 417, 5, 9, 0, 0, 417, 422, 3, 50, 25, 0, 418, 419, 5, 34, 0, 0, 419, 421, 3, 50, 25, 0, 420, 418, 1, 0, 0, 0, 421, 424, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 422, 423, 1, 0, 0, 0, 423, 65, 1, 0, 0, 0, 424, 422, 1, 0, 0, 0, 425, 426, 5, 2, 0, 0, 426, 431, 3, 50, 25, 0, 427, 428, 5, 34, 0, 0, 428, 430, 3, 50, 25, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 67, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 13, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 50, 25, 0, 444, 445, 5, 81, 0, 0, 445, 446, 3, 50, 25, 0, 446, 71, 1, 0, 0, 0, 447, 448, 5, 1, 0, 0, 448, 449, 3, 18, 9, 0, 449, 451, 3, 90, 45, 0, 450, 452, 3, 78, 39, 0, 451, 450, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 73, 1, 0, 0, 0, 453, 454, 5, 7, 0, 0, 454, 455, 3, 18, 9, 0, 455, 456, 3, 90, 45, 0, 456, 75, 1, 0, 0, 0, 457, 458, 5, 12, 0, 0, 458, 459, 3, 48, 24, 0, 459, 77, 1, 0, 0, 0, 460, 465, 3, 80, 40, 0, 461, 462, 5, 34, 0, 0, 462, 464, 3, 80, 40, 0, 463, 461, 1, 0, 0, 0, 464, 467, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 468, 469, 3, 52, 26, 0, 469, 470, 5, 33, 0, 0, 470, 471, 3, 56, 28, 0, 471, 81, 1, 0, 0, 0, 472, 473, 7, 6, 0, 0, 473, 83, 1, 0, 0, 0, 474, 477, 3, 86, 43, 0, 475, 477, 3, 88, 44, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 85, 1, 0, 0, 0, 478, 480, 7, 0, 0, 0, 479, 478, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 5, 29, 0, 0, 482, 87, 1, 0, 0, 0, 483, 485, 7, 0, 0, 0, 484, 483, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 5, 
28, 0, 0, 487, 89, 1, 0, 0, 0, 488, 489, 5, 27, 0, 0, 489, 91, 1, 0, 0, 0, 490, 491, 7, 7, 0, 0, 491, 93, 1, 0, 0, 0, 492, 493, 5, 5, 0, 0, 493, 494, 3, 96, 48, 0, 494, 95, 1, 0, 0, 0, 495, 496, 5, 64, 0, 0, 496, 497, 3, 2, 1, 0, 497, 498, 5, 65, 0, 0, 498, 97, 1, 0, 0, 0, 499, 500, 5, 15, 0, 0, 500, 501, 5, 97, 0, 0, 501, 99, 1, 0, 0, 0, 502, 503, 5, 11, 0, 0, 503, 504, 5, 101, 0, 0, 504, 101, 1, 0, 0, 0, 505, 506, 5, 3, 0, 0, 506, 509, 5, 87, 0, 0, 507, 508, 5, 85, 0, 0, 508, 510, 3, 50, 25, 0, 509, 507, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 520, 1, 0, 0, 0, 511, 512, 5, 86, 0, 0, 512, 517, 3, 104, 52, 0, 513, 514, 5, 34, 0, 0, 514, 516, 3, 104, 52, 0, 515, 513, 1, 0, 0, 0, 516, 519, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 521, 1, 0, 0, 0, 519, 517, 1, 0, 0, 0, 520, 511, 1, 0, 0, 0, 520, 521, 1, 0, 0, 0, 521, 103, 1, 0, 0, 0, 522, 523, 3, 50, 25, 0, 523, 524, 5, 33, 0, 0, 524, 526, 1, 0, 0, 0, 525, 522, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 3, 50, 25, 0, 528, 105, 1, 0, 0, 0, 51, 117, 125, 140, 152, 161, 169, 173, 181, 183, 188, 195, 200, 207, 213, 221, 223, 233, 243, 246, 258, 266, 274, 278, 281, 291, 300, 308, 320, 324, 330, 337, 345, 367, 378, 389, 394, 405, 410, 414, 422, 431, 440, 451, 465, 476, 479, 484, 509, 517, 520, 525] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 88eaf491ca9d5..bf8b31cab183f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -21,53 +21,55 @@ public class EsqlBaseParser extends Parser { KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, WS=22, EXPLAIN_WS=23, 
EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, - ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, - LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, - GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, + PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, + AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, + LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, + PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, + GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, - EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72, - FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, ID_PATTERN=76, - PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, - AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, - ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, - ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, - ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, - MVEXPAND_WS=95, INFO=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, - SHOW_WS=99, FUNCTIONS=100, META_LINE_COMMENT=101, META_MULTILINE_COMMENT=102, - META_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, - SETTING_WS=108; + EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, OPTIONS=71, METADATA=72, FROM_UNQUOTED_IDENTIFIER=73, + FROM_LINE_COMMENT=74, FROM_MULTILINE_COMMENT=75, FROM_WS=76, ID_PATTERN=77, + PROJECT_LINE_COMMENT=78, PROJECT_MULTILINE_COMMENT=79, 
PROJECT_WS=80, + AS=81, RENAME_LINE_COMMENT=82, RENAME_MULTILINE_COMMENT=83, RENAME_WS=84, + ON=85, WITH=86, ENRICH_POLICY_NAME=87, ENRICH_LINE_COMMENT=88, ENRICH_MULTILINE_COMMENT=89, + ENRICH_WS=90, ENRICH_FIELD_LINE_COMMENT=91, ENRICH_FIELD_MULTILINE_COMMENT=92, + ENRICH_FIELD_WS=93, MVEXPAND_LINE_COMMENT=94, MVEXPAND_MULTILINE_COMMENT=95, + MVEXPAND_WS=96, INFO=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, + SHOW_WS=100, FUNCTIONS=101, META_LINE_COMMENT=102, META_MULTILINE_COMMENT=103, + META_WS=104, COLON=105, SETTING=106, SETTING_LINE_COMMENT=107, SETTTING_MULTILINE_COMMENT=108, + SETTING_WS=109; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12, - RULE_field = 13, RULE_fromCommand = 14, RULE_metadata = 15, RULE_metadataOption = 16, - RULE_deprecated_metadata = 17, RULE_evalCommand = 18, RULE_statsCommand = 19, - RULE_inlinestatsCommand = 20, RULE_fromIdentifier = 21, RULE_qualifiedName = 22, - RULE_qualifiedNamePattern = 23, RULE_identifier = 24, RULE_identifierPattern = 25, - RULE_constant = 26, RULE_limitCommand = 27, RULE_sortCommand = 28, RULE_orderExpression = 29, - RULE_keepCommand = 30, RULE_dropCommand = 31, RULE_renameCommand = 32, - RULE_renameClause = 33, RULE_dissectCommand = 34, RULE_grokCommand = 35, - RULE_mvExpandCommand = 36, RULE_commandOptions = 37, RULE_commandOption = 38, - RULE_booleanValue = 39, RULE_numericValue = 40, RULE_decimalValue = 41, - RULE_integerValue = 42, RULE_string = 43, RULE_comparisonOperator = 44, - RULE_explainCommand = 45, RULE_subqueryExpression = 46, RULE_showCommand = 47, - RULE_metaCommand = 48, RULE_enrichCommand = 49, RULE_enrichWithClause = 50; + RULE_field = 13, RULE_fromCommand = 14, 
RULE_fromIdentifier = 15, RULE_fromOptions = 16, + RULE_configOption = 17, RULE_metadata = 18, RULE_metadataOption = 19, + RULE_deprecated_metadata = 20, RULE_evalCommand = 21, RULE_statsCommand = 22, + RULE_inlinestatsCommand = 23, RULE_qualifiedName = 24, RULE_qualifiedNamePattern = 25, + RULE_identifier = 26, RULE_identifierPattern = 27, RULE_constant = 28, + RULE_limitCommand = 29, RULE_sortCommand = 30, RULE_orderExpression = 31, + RULE_keepCommand = 32, RULE_dropCommand = 33, RULE_renameCommand = 34, + RULE_renameClause = 35, RULE_dissectCommand = 36, RULE_grokCommand = 37, + RULE_mvExpandCommand = 38, RULE_commandOptions = 39, RULE_commandOption = 40, + RULE_booleanValue = 41, RULE_numericValue = 42, RULE_decimalValue = 43, + RULE_integerValue = 44, RULE_string = 45, RULE_comparisonOperator = 46, + RULE_explainCommand = 47, RULE_subqueryExpression = 48, RULE_showCommand = 49, + RULE_metaCommand = 50, RULE_enrichCommand = 51, RULE_enrichWithClause = 52; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "rowCommand", "fields", "field", - "fromCommand", "metadata", "metadataOption", "deprecated_metadata", "evalCommand", - "statsCommand", "inlinestatsCommand", "fromIdentifier", "qualifiedName", - "qualifiedNamePattern", "identifier", "identifierPattern", "constant", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", - "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" + "fromCommand", "fromIdentifier", "fromOptions", "configOption", 
"metadata", + "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", + "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "identifier", + "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", + "keepCommand", "dropCommand", "renameCommand", "renameClause", "dissectCommand", + "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", + "booleanValue", "numericValue", "decimalValue", "integerValue", "string", + "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", + "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -82,10 +84,10 @@ private static String[] makeLiteralNames() { "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", null, null, - null, "'functions'", null, null, null, "':'" + null, null, null, null, null, "'options'", "'metadata'", null, null, + null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'with'", null, null, null, null, null, null, null, null, null, null, + "'info'", null, null, null, "'functions'", null, null, null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -95,13 +97,13 @@ private static String[] makeSymbolicNames() { "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", - "ASC", "ASSIGN", "COMMA", "DESC", "DOT", 
"FALSE", "FIRST", "LAST", "LP", - "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", - "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", + "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", + "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", + "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", @@ -197,9 +199,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(102); + setState(106); query(0); - setState(103); + setState(107); match(EOF); } } @@ -295,11 +297,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(106); + setState(110); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(113); + setState(117); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -310,16 +312,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(108); + setState(112); 
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(109); + setState(113); match(PIPE); - setState(110); + setState(114); processingCommand(); } } } - setState(115); + setState(119); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -377,41 +379,41 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(121); + setState(125); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(116); + setState(120); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(117); + setState(121); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(118); + setState(122); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(119); + setState(123); showCommand(); } break; case META: enterOuterAlt(_localctx, 5); { - setState(120); + setState(124); metaCommand(); } break; @@ -495,97 +497,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(136); + setState(140); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(123); + setState(127); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(124); + setState(128); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(125); + setState(129); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(126); + setState(130); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(127); + setState(131); sortCommand(); } break; case STATS: 
enterOuterAlt(_localctx, 6); { - setState(128); + setState(132); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(129); + setState(133); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(130); + setState(134); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(131); + setState(135); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(132); + setState(136); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(133); + setState(137); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(134); + setState(138); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(135); + setState(139); mvExpandCommand(); } break; @@ -636,9 +638,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(138); + setState(142); match(WHERE); - setState(139); + setState(143); booleanExpression(0); } } @@ -833,7 +835,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(169); + setState(173); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -842,9 +844,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(142); + setState(146); match(NOT); - setState(143); + setState(147); booleanExpression(7); } break; @@ -853,7 +855,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(144); + setState(148); valueExpression(); } break; @@ -862,7 +864,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = 
_localctx; _prevctx = _localctx; - setState(145); + setState(149); regexBooleanExpression(); } break; @@ -871,41 +873,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(146); + setState(150); valueExpression(); - setState(148); + setState(152); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(147); + setState(151); match(NOT); } } - setState(150); + setState(154); match(IN); - setState(151); + setState(155); match(LP); - setState(152); + setState(156); valueExpression(); - setState(157); + setState(161); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(153); + setState(157); match(COMMA); - setState(154); + setState(158); valueExpression(); } } - setState(159); + setState(163); _errHandler.sync(this); _la = _input.LA(1); } - setState(160); + setState(164); match(RP); } break; @@ -914,27 +916,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(162); + setState(166); valueExpression(); - setState(163); + setState(167); match(IS); - setState(165); + setState(169); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(164); + setState(168); match(NOT); } } - setState(167); + setState(171); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(179); + setState(183); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -942,7 +944,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(177); + setState(181); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -950,11 
+952,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(171); + setState(175); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(172); + setState(176); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(173); + setState(177); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -963,18 +965,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(174); + setState(178); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(175); + setState(179); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(176); + setState(180); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(181); + setState(185); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1029,48 +1031,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(196); + setState(200); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(182); + setState(186); valueExpression(); - setState(184); + setState(188); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(183); + setState(187); match(NOT); } } - setState(186); + setState(190); 
((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(187); + setState(191); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(189); + setState(193); valueExpression(); - setState(191); + setState(195); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(190); + setState(194); match(NOT); } } - setState(193); + setState(197); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(194); + setState(198); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1156,14 +1158,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(203); + setState(207); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(198); + setState(202); operatorExpression(0); } break; @@ -1171,11 +1173,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(199); + setState(203); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(200); + setState(204); comparisonOperator(); - setState(201); + setState(205); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1300,7 +1302,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(209); + setState(213); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1309,7 +1311,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - 
setState(206); + setState(210); primaryExpression(); } break; @@ -1318,7 +1320,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(207); + setState(211); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1329,13 +1331,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(208); + setState(212); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(219); + setState(223); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1343,7 +1345,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(217); + setState(221); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1351,9 +1353,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(211); + setState(215); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(212); + setState(216); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { @@ -1364,7 +1366,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(213); + setState(217); 
((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1373,9 +1375,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(214); + setState(218); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(215); + setState(219); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1386,14 +1388,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(216); + setState(220); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(221); + setState(225); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1515,14 +1517,14 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); try { - setState(229); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(222); + setState(226); constant(); } break; @@ -1530,7 +1532,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(223); + setState(227); qualifiedName(); } break; @@ -1538,7 +1540,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - 
setState(224); + setState(228); functionExpression(); } break; @@ -1546,11 +1548,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(225); + setState(229); match(LP); - setState(226); + setState(230); booleanExpression(0); - setState(227); + setState(231); match(RP); } break; @@ -1612,20 +1614,20 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(231); + setState(235); identifier(); - setState(232); + setState(236); match(LP); - setState(242); + setState(246); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(233); + setState(237); match(ASTERISK); } break; - case STRING: + case QUOTED_STRING: case INTEGER_LITERAL: case DECIMAL_LITERAL: case FALSE: @@ -1641,21 +1643,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(234); + setState(238); booleanExpression(0); - setState(239); + setState(243); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(235); + setState(239); match(COMMA); - setState(236); + setState(240); booleanExpression(0); } } - setState(241); + setState(245); _errHandler.sync(this); _la = _input.LA(1); } @@ -1667,7 +1669,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(244); + setState(248); match(RP); } } @@ -1714,9 +1716,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(246); + setState(250); match(ROW); - setState(247); + setState(251); fields(); } } @@ -1770,23 +1772,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(249); + setState(253); field(); - setState(254); + setState(258); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(250); + setState(254); match(COMMA); - setState(251); + setState(255); field(); } } } - setState(256); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1836,24 +1838,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 26, RULE_field); try { - setState(262); + setState(266); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(257); + setState(261); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(258); + setState(262); qualifiedName(); - setState(259); + setState(263); match(ASSIGN); - setState(260); + setState(264); booleanExpression(0); } break; @@ -1883,6 +1885,9 @@ public FromIdentifierContext fromIdentifier(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } + public FromOptionsContext fromOptions() { + return getRuleContext(FromOptionsContext.class,0); + } public MetadataContext metadata() { return getRuleContext(MetadataContext.class,0); } @@ -1913,34 +1918,44 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(264); + setState(268); match(FROM); - setState(265); + setState(269); fromIdentifier(); - setState(270); + setState(274); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(266); + setState(270); match(COMMA); - setState(267); + setState(271); fromIdentifier(); } } } - setState(272); + setState(276); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(274); + setState(278); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(273); + setState(277); + fromOptions(); + } + break; + } + setState(281); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + case 1: + { + setState(280); metadata(); } break; @@ -1958,6 +1973,189 @@ public final FromCommandContext fromCommand() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class FromIdentifierContext extends ParserRuleContext { + public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } + public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } + @SuppressWarnings("this-escape") + public FromIdentifierContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fromIdentifier; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); + else return visitor.visitChildren(this); + } + } + + public final FromIdentifierContext fromIdentifier() throws RecognitionException { + FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); + enterRule(_localctx, 30, RULE_fromIdentifier); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(283); + _la = _input.LA(1); + if ( 
!(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class FromOptionsContext extends ParserRuleContext { + public TerminalNode OPTIONS() { return getToken(EsqlBaseParser.OPTIONS, 0); } + public List configOption() { + return getRuleContexts(ConfigOptionContext.class); + } + public ConfigOptionContext configOption(int i) { + return getRuleContext(ConfigOptionContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + @SuppressWarnings("this-escape") + public FromOptionsContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_fromOptions; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromOptions(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromOptions(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromOptions(this); + else return visitor.visitChildren(this); + } + } + + public final FromOptionsContext fromOptions() throws RecognitionException { + FromOptionsContext _localctx = new FromOptionsContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_fromOptions); + try { + int _alt; + enterOuterAlt(_localctx, 1); 
+ { + setState(285); + match(OPTIONS); + setState(286); + configOption(); + setState(291); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(287); + match(COMMA); + setState(288); + configOption(); + } + } + } + setState(293); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class ConfigOptionContext extends ParserRuleContext { + public List string() { + return getRuleContexts(StringContext.class); + } + public StringContext string(int i) { + return getRuleContext(StringContext.class,i); + } + public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); } + @SuppressWarnings("this-escape") + public ConfigOptionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_configOption; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterConfigOption(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitConfigOption(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitConfigOption(this); + else return visitor.visitChildren(this); + } + } + + public final ConfigOptionContext configOption() throws RecognitionException { + ConfigOptionContext _localctx = new ConfigOptionContext(_ctx, getState()); + 
enterRule(_localctx, 34, RULE_configOption); + try { + enterOuterAlt(_localctx, 1); + { + setState(294); + string(); + setState(295); + match(ASSIGN); + setState(296); + string(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class MetadataContext extends ParserRuleContext { public MetadataOptionContext metadataOption() { @@ -1988,22 +2186,22 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_metadata); + enterRule(_localctx, 36, RULE_metadata); try { - setState(278); + setState(300); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(276); + setState(298); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(277); + setState(299); deprecated_metadata(); } break; @@ -2057,32 +2255,32 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataOptionContext metadataOption() throws RecognitionException { MetadataOptionContext _localctx = new MetadataOptionContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_metadataOption); + enterRule(_localctx, 38, RULE_metadataOption); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(280); + setState(302); match(METADATA); - setState(281); + setState(303); fromIdentifier(); - setState(286); + setState(308); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(282); + setState(304); match(COMMA); - setState(283); + setState(305); fromIdentifier(); } } } - 
setState(288); + setState(310); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -2125,15 +2323,15 @@ public T accept(ParseTreeVisitor visitor) { public final Deprecated_metadataContext deprecated_metadata() throws RecognitionException { Deprecated_metadataContext _localctx = new Deprecated_metadataContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_deprecated_metadata); + enterRule(_localctx, 40, RULE_deprecated_metadata); try { enterOuterAlt(_localctx, 1); { - setState(289); + setState(311); match(OPENING_BRACKET); - setState(290); + setState(312); metadataOption(); - setState(291); + setState(313); match(CLOSING_BRACKET); } } @@ -2176,13 +2374,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_evalCommand); + enterRule(_localctx, 42, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(293); + setState(315); match(EVAL); - setState(294); + setState(316); fields(); } } @@ -2231,30 +2429,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_statsCommand); + enterRule(_localctx, 44, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(296); + setState(318); match(STATS); - setState(298); + setState(320); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(297); + setState(319); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(302); + setState(324); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) 
{ + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(300); + setState(322); match(BY); - setState(301); + setState(323); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2306,22 +2504,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_inlinestatsCommand); + enterRule(_localctx, 46, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(326); match(INLINESTATS); - setState(305); + setState(327); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(308); + setState(330); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(306); + setState(328); match(BY); - setState(307); + setState(329); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2339,60 +2537,6 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx return _localctx; } - @SuppressWarnings("CheckReturnValue") - public static class FromIdentifierContext extends ParserRuleContext { - public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } - @SuppressWarnings("this-escape") - public FromIdentifierContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_fromIdentifier; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); - } - @Override - public void 
exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); - else return visitor.visitChildren(this); - } - } - - public final FromIdentifierContext fromIdentifier() throws RecognitionException { - FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_fromIdentifier); - int _la; - try { - enterOuterAlt(_localctx, 1); - { - setState(310); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - @SuppressWarnings("CheckReturnValue") public static class QualifiedNameContext extends ParserRuleContext { public List identifier() { @@ -2427,30 +2571,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_qualifiedName); + enterRule(_localctx, 48, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(312); + setState(332); identifier(); - setState(317); + setState(337); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,30,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(313); + setState(333); match(DOT); - setState(314); 
+ setState(334); identifier(); } } } - setState(319); + setState(339); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + _alt = getInterpreter().adaptivePredict(_input,30,_ctx); } } } @@ -2499,30 +2643,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_qualifiedNamePattern); + enterRule(_localctx, 50, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(320); + setState(340); identifierPattern(); - setState(325); + setState(345); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + _alt = getInterpreter().adaptivePredict(_input,31,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(321); + setState(341); match(DOT); - setState(322); + setState(342); identifierPattern(); } } } - setState(327); + setState(347); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + _alt = getInterpreter().adaptivePredict(_input,31,_ctx); } } } @@ -2563,12 +2707,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_identifier); + enterRule(_localctx, 52, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(348); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2616,11 +2760,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - 
enterRule(_localctx, 50, RULE_identifierPattern); + enterRule(_localctx, 54, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(350); match(ID_PATTERN); } } @@ -2886,17 +3030,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_constant); + enterRule(_localctx, 56, RULE_constant); int _la; try { - setState(374); + setState(394); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(332); + setState(352); match(NULL); } break; @@ -2904,9 +3048,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(333); + setState(353); integerValue(); - setState(334); + setState(354); match(UNQUOTED_IDENTIFIER); } break; @@ -2914,7 +3058,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(336); + setState(356); decimalValue(); } break; @@ -2922,7 +3066,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(337); + setState(357); integerValue(); } break; @@ -2930,7 +3074,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(338); + setState(358); booleanValue(); } break; @@ -2938,7 +3082,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - 
setState(339); + setState(359); match(PARAM); } break; @@ -2946,7 +3090,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(340); + setState(360); string(); } break; @@ -2954,27 +3098,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(341); + setState(361); match(OPENING_BRACKET); - setState(342); + setState(362); numericValue(); - setState(347); + setState(367); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(343); + setState(363); match(COMMA); - setState(344); + setState(364); numericValue(); } } - setState(349); + setState(369); _errHandler.sync(this); _la = _input.LA(1); } - setState(350); + setState(370); match(CLOSING_BRACKET); } break; @@ -2982,27 +3126,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(352); + setState(372); match(OPENING_BRACKET); - setState(353); + setState(373); booleanValue(); - setState(358); + setState(378); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(354); + setState(374); match(COMMA); - setState(355); + setState(375); booleanValue(); } } - setState(360); + setState(380); _errHandler.sync(this); _la = _input.LA(1); } - setState(361); + setState(381); match(CLOSING_BRACKET); } break; @@ -3010,27 +3154,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(363); + setState(383); match(OPENING_BRACKET); - setState(364); + setState(384); string(); - setState(369); + setState(389); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(365); + setState(385); match(COMMA); - 
setState(366); + setState(386); string(); } } - setState(371); + setState(391); _errHandler.sync(this); _la = _input.LA(1); } - setState(372); + setState(392); match(CLOSING_BRACKET); } break; @@ -3073,13 +3217,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_limitCommand); + enterRule(_localctx, 58, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(376); + setState(396); match(LIMIT); - setState(377); + setState(397); match(INTEGER_LITERAL); } } @@ -3129,32 +3273,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_sortCommand); + enterRule(_localctx, 60, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(379); + setState(399); match(SORT); - setState(380); + setState(400); orderExpression(); - setState(385); + setState(405); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(381); + setState(401); match(COMMA); - setState(382); + setState(402); orderExpression(); } } } - setState(387); + setState(407); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } } @@ -3203,19 +3347,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_orderExpression); + enterRule(_localctx, 62, 
RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(388); + setState(408); booleanExpression(0); - setState(390); + setState(410); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: { - setState(389); + setState(409); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3229,14 +3373,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(394); + setState(414); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: { - setState(392); + setState(412); match(NULLS); - setState(393); + setState(413); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3298,32 +3442,32 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_keepCommand); + enterRule(_localctx, 64, RULE_keepCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(396); + setState(416); match(KEEP); - setState(397); + setState(417); qualifiedNamePattern(); - setState(402); + setState(422); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,39,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(398); + setState(418); match(COMMA); - setState(399); + setState(419); qualifiedNamePattern(); } } } - setState(404); + setState(424); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = 
getInterpreter().adaptivePredict(_input,39,_ctx); } } } @@ -3373,32 +3517,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_dropCommand); + enterRule(_localctx, 66, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(405); + setState(425); match(DROP); - setState(406); + setState(426); qualifiedNamePattern(); - setState(411); + setState(431); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(407); + setState(427); match(COMMA); - setState(408); + setState(428); qualifiedNamePattern(); } } } - setState(413); + setState(433); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,38,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3448,32 +3592,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_renameCommand); + enterRule(_localctx, 68, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(414); + setState(434); match(RENAME); - setState(415); + setState(435); renameClause(); - setState(420); + setState(440); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(416); + setState(436); match(COMMA); - setState(417); + setState(437); renameClause(); } } } - setState(422); + setState(442); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } } } @@ -3521,15 +3665,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_renameClause); + enterRule(_localctx, 70, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(423); + setState(443); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(424); + setState(444); match(AS); - setState(425); + setState(445); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3578,22 +3722,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_dissectCommand); + enterRule(_localctx, 72, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(427); + setState(447); match(DISSECT); - setState(428); + setState(448); primaryExpression(); - setState(429); + setState(449); string(); - setState(431); + setState(451); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(430); + setState(450); commandOptions(); } break; @@ -3642,15 +3786,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_grokCommand); + enterRule(_localctx, 74, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(433); + setState(453); match(GROK); - setState(434); + setState(454); primaryExpression(); - setState(435); + setState(455); string(); } } @@ -3693,13 
+3837,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_mvExpandCommand); + enterRule(_localctx, 76, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(437); + setState(457); match(MV_EXPAND); - setState(438); + setState(458); qualifiedName(); } } @@ -3748,30 +3892,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_commandOptions); + enterRule(_localctx, 78, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(440); + setState(460); commandOption(); - setState(445); + setState(465); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(441); + setState(461); match(COMMA); - setState(442); + setState(462); commandOption(); } } } - setState(447); + setState(467); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3817,15 +3961,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_commandOption); + enterRule(_localctx, 80, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(448); + setState(468); identifier(); - setState(449); + setState(469); match(ASSIGN); - setState(450); + setState(470); constant(); } } @@ -3866,12 +4010,12 @@ public T 
accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_booleanValue); + enterRule(_localctx, 82, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(452); + setState(472); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3924,22 +4068,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_numericValue); + enterRule(_localctx, 84, RULE_numericValue); try { - setState(456); + setState(476); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(454); + setState(474); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(455); + setState(475); integerValue(); } break; @@ -3983,17 +4127,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_decimalValue); + enterRule(_localctx, 86, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(459); + setState(479); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(458); + setState(478); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4006,7 +4150,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(461); + setState(481); match(DECIMAL_LITERAL); } } @@ -4048,17 +4192,17 @@ public T accept(ParseTreeVisitor visitor) { public final 
IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_integerValue); + enterRule(_localctx, 88, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(464); + setState(484); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(463); + setState(483); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4071,7 +4215,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(466); + setState(486); match(INTEGER_LITERAL); } } @@ -4088,7 +4232,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class StringContext extends ParserRuleContext { - public TerminalNode STRING() { return getToken(EsqlBaseParser.STRING, 0); } + public TerminalNode QUOTED_STRING() { return getToken(EsqlBaseParser.QUOTED_STRING, 0); } @SuppressWarnings("this-escape") public StringContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -4111,12 +4255,12 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_string); + enterRule(_localctx, 90, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(468); - match(STRING); + setState(488); + match(QUOTED_STRING); } } catch (RecognitionException re) { @@ -4160,12 +4304,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_comparisonOperator); + enterRule(_localctx, 92, RULE_comparisonOperator); int _la; try { 
enterOuterAlt(_localctx, 1); { - setState(470); + setState(490); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { _errHandler.recoverInline(this); @@ -4216,13 +4360,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_explainCommand); + enterRule(_localctx, 94, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(472); + setState(492); match(EXPLAIN); - setState(473); + setState(493); subqueryExpression(); } } @@ -4266,15 +4410,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_subqueryExpression); + enterRule(_localctx, 96, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(475); + setState(495); match(OPENING_BRACKET); - setState(476); + setState(496); query(0); - setState(477); + setState(497); match(CLOSING_BRACKET); } } @@ -4326,14 +4470,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_showCommand); + enterRule(_localctx, 98, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(479); + setState(499); match(SHOW); - setState(480); + setState(500); match(INFO); } } @@ -4385,14 +4529,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_metaCommand); + enterRule(_localctx, 
100, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(482); + setState(502); match(META); - setState(483); + setState(503); match(FUNCTIONS); } } @@ -4450,53 +4594,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_enrichCommand); + enterRule(_localctx, 102, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(485); + setState(505); match(ENRICH); - setState(486); + setState(506); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(489); + setState(509); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(487); + setState(507); match(ON); - setState(488); + setState(508); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(500); + setState(520); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(491); + setState(511); match(WITH); - setState(492); + setState(512); enrichWithClause(); - setState(497); + setState(517); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,46,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(493); + setState(513); match(COMMA); - setState(494); + setState(514); enrichWithClause(); } } } - setState(499); + setState(519); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,46,_ctx); + _alt = getInterpreter().adaptivePredict(_input,48,_ctx); } } break; @@ -4547,23 +4691,23 @@ public T 
accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_enrichWithClause); + enterRule(_localctx, 104, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(525); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(502); + setState(522); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(503); + setState(523); match(ASSIGN); } break; } - setState(507); + setState(527); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4616,7 +4760,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001l\u01fe\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001m\u0212\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4630,317 +4774,329 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001p\b\u0001\n\u0001"+ - "\f\u0001s\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0003\u0002z\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - 
"\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0089\b\u0003\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u0095\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u009c"+ - "\b\u0005\n\u0005\f\u0005\u009f\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00a6\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0003\u0005\u00aa\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0005\u0005\u00b2\b\u0005\n\u0005\f\u0005\u00b5"+ - "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b9\b\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c0\b\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c5\b\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00cc\b\u0007"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00d2\b\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00da\b\b\n\b\f\b\u00dd\t\b\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00e6\b\t\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00ee\b\n\n\n\f\n\u00f1"+ - "\t\n\u0003\n\u00f3\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0005\f\u00fd\b\f\n\f\f\f\u0100\t\f\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0003\r\u0107\b\r\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0005\u000e\u010d\b\u000e\n\u000e\f\u000e\u0110\t\u000e"+ - "\u0001\u000e\u0003\u000e\u0113\b\u000e\u0001\u000f\u0001\u000f\u0003\u000f"+ - "\u0117\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010"+ - "\u011d\b\u0010\n\u0010\f\u0010\u0120\t\u0010\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - 
"\u0013\u0003\u0013\u012b\b\u0013\u0001\u0013\u0001\u0013\u0003\u0013\u012f"+ - "\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0003\u0014\u0135"+ - "\b\u0014\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+ - "\u0016\u013c\b\u0016\n\u0016\f\u0016\u013f\t\u0016\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0005\u0017\u0144\b\u0017\n\u0017\f\u0017\u0147\t\u0017\u0001"+ - "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u015a"+ - "\b\u001a\n\u001a\f\u001a\u015d\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0165\b\u001a\n\u001a"+ - "\f\u001a\u0168\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0005\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173"+ - "\t\u001a\u0001\u001a\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u0180\b\u001c\n\u001c\f\u001c\u0183\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0003\u001d\u0187\b\u001d\u0001\u001d\u0001\u001d\u0003\u001d\u018b"+ - "\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0191"+ - "\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0001\u001f\u0005\u001f\u019a\b\u001f\n\u001f\f\u001f\u019d\t\u001f\u0001"+ - " \u0001 \u0001 \u0001 \u0005 \u01a3\b \n \f \u01a6\t \u0001!\u0001!\u0001"+ - "!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0003\"\u01b0\b\"\u0001#\u0001"+ - "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0005%\u01bc"+ - "\b%\n%\f%\u01bf\t%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001("+ - "\u0001(\u0003(\u01c9\b(\u0001)\u0003)\u01cc\b)\u0001)\u0001)\u0001*\u0003"+ - "*\u01d1\b*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001"+ - 
"-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u0001"+ - "0\u00011\u00011\u00011\u00011\u00031\u01ea\b1\u00011\u00011\u00011\u0001"+ - "1\u00051\u01f0\b1\n1\f1\u01f3\t1\u00031\u01f5\b1\u00012\u00012\u00012"+ - "\u00032\u01fa\b2\u00012\u00012\u00012\u0000\u0003\u0002\n\u00103\u0000"+ - "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ - "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bd\u0000\b\u0001\u0000;<\u0001"+ - "\u0000=?\u0002\u0000CCHH\u0001\u0000BC\u0002\u0000 ##\u0001\u0000&\'"+ - "\u0002\u0000%%33\u0002\u0000446:\u0217\u0000f\u0001\u0000\u0000\u0000"+ - "\u0002i\u0001\u0000\u0000\u0000\u0004y\u0001\u0000\u0000\u0000\u0006\u0088"+ - "\u0001\u0000\u0000\u0000\b\u008a\u0001\u0000\u0000\u0000\n\u00a9\u0001"+ - "\u0000\u0000\u0000\f\u00c4\u0001\u0000\u0000\u0000\u000e\u00cb\u0001\u0000"+ - "\u0000\u0000\u0010\u00d1\u0001\u0000\u0000\u0000\u0012\u00e5\u0001\u0000"+ - "\u0000\u0000\u0014\u00e7\u0001\u0000\u0000\u0000\u0016\u00f6\u0001\u0000"+ - "\u0000\u0000\u0018\u00f9\u0001\u0000\u0000\u0000\u001a\u0106\u0001\u0000"+ - "\u0000\u0000\u001c\u0108\u0001\u0000\u0000\u0000\u001e\u0116\u0001\u0000"+ - "\u0000\u0000 \u0118\u0001\u0000\u0000\u0000\"\u0121\u0001\u0000\u0000"+ - "\u0000$\u0125\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000("+ - "\u0130\u0001\u0000\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u0138\u0001"+ - "\u0000\u0000\u0000.\u0140\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000"+ - "\u00002\u014a\u0001\u0000\u0000\u00004\u0176\u0001\u0000\u0000\u00006"+ - "\u0178\u0001\u0000\u0000\u00008\u017b\u0001\u0000\u0000\u0000:\u0184\u0001"+ - "\u0000\u0000\u0000<\u018c\u0001\u0000\u0000\u0000>\u0195\u0001\u0000\u0000"+ - "\u0000@\u019e\u0001\u0000\u0000\u0000B\u01a7\u0001\u0000\u0000\u0000D"+ - "\u01ab\u0001\u0000\u0000\u0000F\u01b1\u0001\u0000\u0000\u0000H\u01b5\u0001"+ - "\u0000\u0000\u0000J\u01b8\u0001\u0000\u0000\u0000L\u01c0\u0001\u0000\u0000"+ - 
"\u0000N\u01c4\u0001\u0000\u0000\u0000P\u01c8\u0001\u0000\u0000\u0000R"+ - "\u01cb\u0001\u0000\u0000\u0000T\u01d0\u0001\u0000\u0000\u0000V\u01d4\u0001"+ - "\u0000\u0000\u0000X\u01d6\u0001\u0000\u0000\u0000Z\u01d8\u0001\u0000\u0000"+ - "\u0000\\\u01db\u0001\u0000\u0000\u0000^\u01df\u0001\u0000\u0000\u0000"+ - "`\u01e2\u0001\u0000\u0000\u0000b\u01e5\u0001\u0000\u0000\u0000d\u01f9"+ - "\u0001\u0000\u0000\u0000fg\u0003\u0002\u0001\u0000gh\u0005\u0000\u0000"+ - "\u0001h\u0001\u0001\u0000\u0000\u0000ij\u0006\u0001\uffff\uffff\u0000"+ - "jk\u0003\u0004\u0002\u0000kq\u0001\u0000\u0000\u0000lm\n\u0001\u0000\u0000"+ - "mn\u0005\u001a\u0000\u0000np\u0003\u0006\u0003\u0000ol\u0001\u0000\u0000"+ - "\u0000ps\u0001\u0000\u0000\u0000qo\u0001\u0000\u0000\u0000qr\u0001\u0000"+ - "\u0000\u0000r\u0003\u0001\u0000\u0000\u0000sq\u0001\u0000\u0000\u0000"+ - "tz\u0003Z-\u0000uz\u0003\u001c\u000e\u0000vz\u0003\u0016\u000b\u0000w"+ - "z\u0003^/\u0000xz\u0003`0\u0000yt\u0001\u0000\u0000\u0000yu\u0001\u0000"+ - "\u0000\u0000yv\u0001\u0000\u0000\u0000yw\u0001\u0000\u0000\u0000yx\u0001"+ - "\u0000\u0000\u0000z\u0005\u0001\u0000\u0000\u0000{\u0089\u0003$\u0012"+ - "\u0000|\u0089\u0003(\u0014\u0000}\u0089\u00036\u001b\u0000~\u0089\u0003"+ - "<\u001e\u0000\u007f\u0089\u00038\u001c\u0000\u0080\u0089\u0003&\u0013"+ - "\u0000\u0081\u0089\u0003\b\u0004\u0000\u0082\u0089\u0003>\u001f\u0000"+ - "\u0083\u0089\u0003@ \u0000\u0084\u0089\u0003D\"\u0000\u0085\u0089\u0003"+ - "F#\u0000\u0086\u0089\u0003b1\u0000\u0087\u0089\u0003H$\u0000\u0088{\u0001"+ - "\u0000\u0000\u0000\u0088|\u0001\u0000\u0000\u0000\u0088}\u0001\u0000\u0000"+ - "\u0000\u0088~\u0001\u0000\u0000\u0000\u0088\u007f\u0001\u0000\u0000\u0000"+ - "\u0088\u0080\u0001\u0000\u0000\u0000\u0088\u0081\u0001\u0000\u0000\u0000"+ - "\u0088\u0082\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000\u0000\u0000"+ - "\u0088\u0084\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000\u0000\u0000"+ - 
"\u0088\u0086\u0001\u0000\u0000\u0000\u0088\u0087\u0001\u0000\u0000\u0000"+ - "\u0089\u0007\u0001\u0000\u0000\u0000\u008a\u008b\u0005\u0012\u0000\u0000"+ - "\u008b\u008c\u0003\n\u0005\u0000\u008c\t\u0001\u0000\u0000\u0000\u008d"+ - "\u008e\u0006\u0005\uffff\uffff\u0000\u008e\u008f\u0005,\u0000\u0000\u008f"+ - "\u00aa\u0003\n\u0005\u0007\u0090\u00aa\u0003\u000e\u0007\u0000\u0091\u00aa"+ - "\u0003\f\u0006\u0000\u0092\u0094\u0003\u000e\u0007\u0000\u0093\u0095\u0005"+ - ",\u0000\u0000\u0094\u0093\u0001\u0000\u0000\u0000\u0094\u0095\u0001\u0000"+ - "\u0000\u0000\u0095\u0096\u0001\u0000\u0000\u0000\u0096\u0097\u0005)\u0000"+ - "\u0000\u0097\u0098\u0005(\u0000\u0000\u0098\u009d\u0003\u000e\u0007\u0000"+ - "\u0099\u009a\u0005\"\u0000\u0000\u009a\u009c\u0003\u000e\u0007\u0000\u009b"+ - "\u0099\u0001\u0000\u0000\u0000\u009c\u009f\u0001\u0000\u0000\u0000\u009d"+ - "\u009b\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e"+ - "\u00a0\u0001\u0000\u0000\u0000\u009f\u009d\u0001\u0000\u0000\u0000\u00a0"+ - "\u00a1\u00052\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000\u00a2\u00a3"+ - "\u0003\u000e\u0007\u0000\u00a3\u00a5\u0005*\u0000\u0000\u00a4\u00a6\u0005"+ - ",\u0000\u0000\u00a5\u00a4\u0001\u0000\u0000\u0000\u00a5\u00a6\u0001\u0000"+ - "\u0000\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a8\u0005-\u0000"+ - "\u0000\u00a8\u00aa\u0001\u0000\u0000\u0000\u00a9\u008d\u0001\u0000\u0000"+ - "\u0000\u00a9\u0090\u0001\u0000\u0000\u0000\u00a9\u0091\u0001\u0000\u0000"+ - "\u0000\u00a9\u0092\u0001\u0000\u0000\u0000\u00a9\u00a2\u0001\u0000\u0000"+ - "\u0000\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\n\u0004\u0000\u0000"+ - "\u00ac\u00ad\u0005\u001f\u0000\u0000\u00ad\u00b2\u0003\n\u0005\u0005\u00ae"+ - "\u00af\n\u0003\u0000\u0000\u00af\u00b0\u0005/\u0000\u0000\u00b0\u00b2"+ - "\u0003\n\u0005\u0004\u00b1\u00ab\u0001\u0000\u0000\u0000\u00b1\u00ae\u0001"+ - "\u0000\u0000\u0000\u00b2\u00b5\u0001\u0000\u0000\u0000\u00b3\u00b1\u0001"+ - 
"\u0000\u0000\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u000b\u0001"+ - "\u0000\u0000\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b8\u0003"+ - "\u000e\u0007\u0000\u00b7\u00b9\u0005,\u0000\u0000\u00b8\u00b7\u0001\u0000"+ - "\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000"+ - "\u0000\u0000\u00ba\u00bb\u0005+\u0000\u0000\u00bb\u00bc\u0003V+\u0000"+ - "\u00bc\u00c5\u0001\u0000\u0000\u0000\u00bd\u00bf\u0003\u000e\u0007\u0000"+ - "\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000\u0000\u00bf"+ - "\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000\u0000\u00c1"+ - "\u00c2\u00051\u0000\u0000\u00c2\u00c3\u0003V+\u0000\u00c3\u00c5\u0001"+ - "\u0000\u0000\u0000\u00c4\u00b6\u0001\u0000\u0000\u0000\u00c4\u00bd\u0001"+ - "\u0000\u0000\u0000\u00c5\r\u0001\u0000\u0000\u0000\u00c6\u00cc\u0003\u0010"+ - "\b\u0000\u00c7\u00c8\u0003\u0010\b\u0000\u00c8\u00c9\u0003X,\u0000\u00c9"+ - "\u00ca\u0003\u0010\b\u0000\u00ca\u00cc\u0001\u0000\u0000\u0000\u00cb\u00c6"+ - "\u0001\u0000\u0000\u0000\u00cb\u00c7\u0001\u0000\u0000\u0000\u00cc\u000f"+ - "\u0001\u0000\u0000\u0000\u00cd\u00ce\u0006\b\uffff\uffff\u0000\u00ce\u00d2"+ - "\u0003\u0012\t\u0000\u00cf\u00d0\u0007\u0000\u0000\u0000\u00d0\u00d2\u0003"+ - "\u0010\b\u0003\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d1\u00cf\u0001\u0000"+ - "\u0000\u0000\u00d2\u00db\u0001\u0000\u0000\u0000\u00d3\u00d4\n\u0002\u0000"+ - "\u0000\u00d4\u00d5\u0007\u0001\u0000\u0000\u00d5\u00da\u0003\u0010\b\u0003"+ - "\u00d6\u00d7\n\u0001\u0000\u0000\u00d7\u00d8\u0007\u0000\u0000\u0000\u00d8"+ - "\u00da\u0003\u0010\b\u0002\u00d9\u00d3\u0001\u0000\u0000\u0000\u00d9\u00d6"+ - "\u0001\u0000\u0000\u0000\u00da\u00dd\u0001\u0000\u0000\u0000\u00db\u00d9"+ - "\u0001\u0000\u0000\u0000\u00db\u00dc\u0001\u0000\u0000\u0000\u00dc\u0011"+ - "\u0001\u0000\u0000\u0000\u00dd\u00db\u0001\u0000\u0000\u0000\u00de\u00e6"+ - "\u00034\u001a\u0000\u00df\u00e6\u0003,\u0016\u0000\u00e0\u00e6\u0003\u0014"+ - 
"\n\u0000\u00e1\u00e2\u0005(\u0000\u0000\u00e2\u00e3\u0003\n\u0005\u0000"+ - "\u00e3\u00e4\u00052\u0000\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5"+ - "\u00de\u0001\u0000\u0000\u0000\u00e5\u00df\u0001\u0000\u0000\u0000\u00e5"+ - "\u00e0\u0001\u0000\u0000\u0000\u00e5\u00e1\u0001\u0000\u0000\u0000\u00e6"+ - "\u0013\u0001\u0000\u0000\u0000\u00e7\u00e8\u00030\u0018\u0000\u00e8\u00f2"+ - "\u0005(\u0000\u0000\u00e9\u00f3\u0005=\u0000\u0000\u00ea\u00ef\u0003\n"+ - "\u0005\u0000\u00eb\u00ec\u0005\"\u0000\u0000\u00ec\u00ee\u0003\n\u0005"+ - "\u0000\u00ed\u00eb\u0001\u0000\u0000\u0000\u00ee\u00f1\u0001\u0000\u0000"+ - "\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000"+ - "\u0000\u00f0\u00f3\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000"+ - "\u0000\u00f2\u00e9\u0001\u0000\u0000\u0000\u00f2\u00ea\u0001\u0000\u0000"+ - "\u0000\u00f2\u00f3\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001\u0000\u0000"+ - "\u0000\u00f4\u00f5\u00052\u0000\u0000\u00f5\u0015\u0001\u0000\u0000\u0000"+ - "\u00f6\u00f7\u0005\u000e\u0000\u0000\u00f7\u00f8\u0003\u0018\f\u0000\u00f8"+ - "\u0017\u0001\u0000\u0000\u0000\u00f9\u00fe\u0003\u001a\r\u0000\u00fa\u00fb"+ - "\u0005\"\u0000\u0000\u00fb\u00fd\u0003\u001a\r\u0000\u00fc\u00fa\u0001"+ - "\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001"+ - "\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000\u0000\u00ff\u0019\u0001"+ - "\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000\u0000\u0101\u0107\u0003"+ - "\n\u0005\u0000\u0102\u0103\u0003,\u0016\u0000\u0103\u0104\u0005!\u0000"+ - "\u0000\u0104\u0105\u0003\n\u0005\u0000\u0105\u0107\u0001\u0000\u0000\u0000"+ - "\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102\u0001\u0000\u0000\u0000"+ - "\u0107\u001b\u0001\u0000\u0000\u0000\u0108\u0109\u0005\u0006\u0000\u0000"+ - "\u0109\u010e\u0003*\u0015\u0000\u010a\u010b\u0005\"\u0000\u0000\u010b"+ - "\u010d\u0003*\u0015\u0000\u010c\u010a\u0001\u0000\u0000\u0000\u010d\u0110"+ - 
"\u0001\u0000\u0000\u0000\u010e\u010c\u0001\u0000\u0000\u0000\u010e\u010f"+ - "\u0001\u0000\u0000\u0000\u010f\u0112\u0001\u0000\u0000\u0000\u0110\u010e"+ - "\u0001\u0000\u0000\u0000\u0111\u0113\u0003\u001e\u000f\u0000\u0112\u0111"+ - "\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001d"+ - "\u0001\u0000\u0000\u0000\u0114\u0117\u0003 \u0010\u0000\u0115\u0117\u0003"+ - "\"\u0011\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0115\u0001\u0000"+ - "\u0000\u0000\u0117\u001f\u0001\u0000\u0000\u0000\u0118\u0119\u0005G\u0000"+ - "\u0000\u0119\u011e\u0003*\u0015\u0000\u011a\u011b\u0005\"\u0000\u0000"+ - "\u011b\u011d\u0003*\u0015\u0000\u011c\u011a\u0001\u0000\u0000\u0000\u011d"+ - "\u0120\u0001\u0000\u0000\u0000\u011e\u011c\u0001\u0000\u0000\u0000\u011e"+ - "\u011f\u0001\u0000\u0000\u0000\u011f!\u0001\u0000\u0000\u0000\u0120\u011e"+ - "\u0001\u0000\u0000\u0000\u0121\u0122\u0005@\u0000\u0000\u0122\u0123\u0003"+ - " \u0010\u0000\u0123\u0124\u0005A\u0000\u0000\u0124#\u0001\u0000\u0000"+ - "\u0000\u0125\u0126\u0005\u0004\u0000\u0000\u0126\u0127\u0003\u0018\f\u0000"+ - "\u0127%\u0001\u0000\u0000\u0000\u0128\u012a\u0005\u0011\u0000\u0000\u0129"+ - "\u012b\u0003\u0018\f\u0000\u012a\u0129\u0001\u0000\u0000\u0000\u012a\u012b"+ - "\u0001\u0000\u0000\u0000\u012b\u012e\u0001\u0000\u0000\u0000\u012c\u012d"+ - "\u0005\u001e\u0000\u0000\u012d\u012f\u0003\u0018\f\u0000\u012e\u012c\u0001"+ - "\u0000\u0000\u0000\u012e\u012f\u0001\u0000\u0000\u0000\u012f\'\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0005\b\u0000\u0000\u0131\u0134\u0003\u0018\f"+ - "\u0000\u0132\u0133\u0005\u001e\u0000\u0000\u0133\u0135\u0003\u0018\f\u0000"+ + "2\u00072\u00023\u00073\u00024\u00074\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0005\u0001t\b\u0001\n\u0001\f\u0001w\t\u0001\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002~\b\u0002\u0001\u0003"+ + 
"\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0003\u0003\u008d\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u0099\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0005\u0005\u00a0\b\u0005\n\u0005\f\u0005\u00a3\t\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00aa"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ae\b\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ + "\u00b6\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00bd\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0003\u0006\u00c4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00c9\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0003\u0007\u00d0\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ + "\b\u00d6\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00de"+ + "\b\b\n\b\f\b\u00e1\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ + "\u0001\t\u0003\t\u00ea\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u00f2\b\n\n\n\f\n\u00f5\t\n\u0003\n\u00f7\b\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f"+ + "\u0101\b\f\n\f\f\f\u0104\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003"+ + "\r\u010b\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+ + "\u0111\b\u000e\n\u000e\f\u000e\u0114\t\u000e\u0001\u000e\u0003\u000e\u0117"+ + "\b\u000e\u0001\u000e\u0003\u000e\u011a\b\u000e\u0001\u000f\u0001\u000f"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u0122\b\u0010"+ + "\n\u0010\f\u0010\u0125\t\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + 
"\u0011\u0001\u0012\u0001\u0012\u0003\u0012\u012d\b\u0012\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0133\b\u0013\n\u0013\f\u0013"+ + "\u0136\t\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0003\u0016\u0141\b\u0016"+ + "\u0001\u0016\u0001\u0016\u0003\u0016\u0145\b\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0003\u0017\u014b\b\u0017\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0005\u0018\u0150\b\u0018\n\u0018\f\u0018\u0153\t\u0018\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0158\b\u0019\n\u0019\f\u0019"+ + "\u015b\t\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0005\u001c\u016e\b\u001c\n\u001c\f\u001c\u0171\t\u001c\u0001\u001c\u0001"+ + "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0179"+ + "\b\u001c\n\u001c\f\u001c\u017c\t\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0184\b\u001c\n\u001c"+ + "\f\u001c\u0187\t\u001c\u0001\u001c\u0001\u001c\u0003\u001c\u018b\b\u001c"+ + "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0005\u001e\u0194\b\u001e\n\u001e\f\u001e\u0197\t\u001e\u0001"+ + "\u001f\u0001\u001f\u0003\u001f\u019b\b\u001f\u0001\u001f\u0001\u001f\u0003"+ + "\u001f\u019f\b\u001f\u0001 \u0001 \u0001 \u0001 \u0005 \u01a5\b \n \f"+ + " \u01a8\t \u0001!\u0001!\u0001!\u0001!\u0005!\u01ae\b!\n!\f!\u01b1\t!"+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b7\b\"\n\"\f\"\u01ba\t\"\u0001"+ + "#\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0003$\u01c4\b$\u0001"+ + "%\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0005"+ + "\'\u01d0\b\'\n\'\f\'\u01d3\t\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001"+ + 
")\u0001*\u0001*\u0003*\u01dd\b*\u0001+\u0003+\u01e0\b+\u0001+\u0001+\u0001"+ + ",\u0003,\u01e5\b,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001/\u0001"+ + "/\u0001/\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00012\u0001"+ + "2\u00012\u00013\u00013\u00013\u00013\u00033\u01fe\b3\u00013\u00013\u0001"+ + "3\u00013\u00053\u0204\b3\n3\f3\u0207\t3\u00033\u0209\b3\u00014\u00014"+ + "\u00014\u00034\u020e\b4\u00014\u00014\u00014\u0000\u0003\u0002\n\u0010"+ + "5\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ + "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh\u0000\b\u0001\u0000"+ + ";<\u0001\u0000=?\u0002\u0000CCII\u0001\u0000BC\u0002\u0000 ##\u0001\u0000"+ + "&\'\u0002\u0000%%33\u0002\u0000446:\u022b\u0000j\u0001\u0000\u0000\u0000"+ + "\u0002m\u0001\u0000\u0000\u0000\u0004}\u0001\u0000\u0000\u0000\u0006\u008c"+ + "\u0001\u0000\u0000\u0000\b\u008e\u0001\u0000\u0000\u0000\n\u00ad\u0001"+ + "\u0000\u0000\u0000\f\u00c8\u0001\u0000\u0000\u0000\u000e\u00cf\u0001\u0000"+ + "\u0000\u0000\u0010\u00d5\u0001\u0000\u0000\u0000\u0012\u00e9\u0001\u0000"+ + "\u0000\u0000\u0014\u00eb\u0001\u0000\u0000\u0000\u0016\u00fa\u0001\u0000"+ + "\u0000\u0000\u0018\u00fd\u0001\u0000\u0000\u0000\u001a\u010a\u0001\u0000"+ + "\u0000\u0000\u001c\u010c\u0001\u0000\u0000\u0000\u001e\u011b\u0001\u0000"+ + "\u0000\u0000 \u011d\u0001\u0000\u0000\u0000\"\u0126\u0001\u0000\u0000"+ + "\u0000$\u012c\u0001\u0000\u0000\u0000&\u012e\u0001\u0000\u0000\u0000("+ + "\u0137\u0001\u0000\u0000\u0000*\u013b\u0001\u0000\u0000\u0000,\u013e\u0001"+ + "\u0000\u0000\u0000.\u0146\u0001\u0000\u0000\u00000\u014c\u0001\u0000\u0000"+ + "\u00002\u0154\u0001\u0000\u0000\u00004\u015c\u0001\u0000\u0000\u00006"+ + "\u015e\u0001\u0000\u0000\u00008\u018a\u0001\u0000\u0000\u0000:\u018c\u0001"+ + "\u0000\u0000\u0000<\u018f\u0001\u0000\u0000\u0000>\u0198\u0001\u0000\u0000"+ + "\u0000@\u01a0\u0001\u0000\u0000\u0000B\u01a9\u0001\u0000\u0000\u0000D"+ + 
"\u01b2\u0001\u0000\u0000\u0000F\u01bb\u0001\u0000\u0000\u0000H\u01bf\u0001"+ + "\u0000\u0000\u0000J\u01c5\u0001\u0000\u0000\u0000L\u01c9\u0001\u0000\u0000"+ + "\u0000N\u01cc\u0001\u0000\u0000\u0000P\u01d4\u0001\u0000\u0000\u0000R"+ + "\u01d8\u0001\u0000\u0000\u0000T\u01dc\u0001\u0000\u0000\u0000V\u01df\u0001"+ + "\u0000\u0000\u0000X\u01e4\u0001\u0000\u0000\u0000Z\u01e8\u0001\u0000\u0000"+ + "\u0000\\\u01ea\u0001\u0000\u0000\u0000^\u01ec\u0001\u0000\u0000\u0000"+ + "`\u01ef\u0001\u0000\u0000\u0000b\u01f3\u0001\u0000\u0000\u0000d\u01f6"+ + "\u0001\u0000\u0000\u0000f\u01f9\u0001\u0000\u0000\u0000h\u020d\u0001\u0000"+ + "\u0000\u0000jk\u0003\u0002\u0001\u0000kl\u0005\u0000\u0000\u0001l\u0001"+ + "\u0001\u0000\u0000\u0000mn\u0006\u0001\uffff\uffff\u0000no\u0003\u0004"+ + "\u0002\u0000ou\u0001\u0000\u0000\u0000pq\n\u0001\u0000\u0000qr\u0005\u001a"+ + "\u0000\u0000rt\u0003\u0006\u0003\u0000sp\u0001\u0000\u0000\u0000tw\u0001"+ + "\u0000\u0000\u0000us\u0001\u0000\u0000\u0000uv\u0001\u0000\u0000\u0000"+ + "v\u0003\u0001\u0000\u0000\u0000wu\u0001\u0000\u0000\u0000x~\u0003^/\u0000"+ + "y~\u0003\u001c\u000e\u0000z~\u0003\u0016\u000b\u0000{~\u0003b1\u0000|"+ + "~\u0003d2\u0000}x\u0001\u0000\u0000\u0000}y\u0001\u0000\u0000\u0000}z"+ + "\u0001\u0000\u0000\u0000}{\u0001\u0000\u0000\u0000}|\u0001\u0000\u0000"+ + "\u0000~\u0005\u0001\u0000\u0000\u0000\u007f\u008d\u0003*\u0015\u0000\u0080"+ + "\u008d\u0003.\u0017\u0000\u0081\u008d\u0003:\u001d\u0000\u0082\u008d\u0003"+ + "@ \u0000\u0083\u008d\u0003<\u001e\u0000\u0084\u008d\u0003,\u0016\u0000"+ + "\u0085\u008d\u0003\b\u0004\u0000\u0086\u008d\u0003B!\u0000\u0087\u008d"+ + "\u0003D\"\u0000\u0088\u008d\u0003H$\u0000\u0089\u008d\u0003J%\u0000\u008a"+ + "\u008d\u0003f3\u0000\u008b\u008d\u0003L&\u0000\u008c\u007f\u0001\u0000"+ + "\u0000\u0000\u008c\u0080\u0001\u0000\u0000\u0000\u008c\u0081\u0001\u0000"+ + "\u0000\u0000\u008c\u0082\u0001\u0000\u0000\u0000\u008c\u0083\u0001\u0000"+ + 
"\u0000\u0000\u008c\u0084\u0001\u0000\u0000\u0000\u008c\u0085\u0001\u0000"+ + "\u0000\u0000\u008c\u0086\u0001\u0000\u0000\u0000\u008c\u0087\u0001\u0000"+ + "\u0000\u0000\u008c\u0088\u0001\u0000\u0000\u0000\u008c\u0089\u0001\u0000"+ + "\u0000\u0000\u008c\u008a\u0001\u0000\u0000\u0000\u008c\u008b\u0001\u0000"+ + "\u0000\u0000\u008d\u0007\u0001\u0000\u0000\u0000\u008e\u008f\u0005\u0012"+ + "\u0000\u0000\u008f\u0090\u0003\n\u0005\u0000\u0090\t\u0001\u0000\u0000"+ + "\u0000\u0091\u0092\u0006\u0005\uffff\uffff\u0000\u0092\u0093\u0005,\u0000"+ + "\u0000\u0093\u00ae\u0003\n\u0005\u0007\u0094\u00ae\u0003\u000e\u0007\u0000"+ + "\u0095\u00ae\u0003\f\u0006\u0000\u0096\u0098\u0003\u000e\u0007\u0000\u0097"+ + "\u0099\u0005,\u0000\u0000\u0098\u0097\u0001\u0000\u0000\u0000\u0098\u0099"+ + "\u0001\u0000\u0000\u0000\u0099\u009a\u0001\u0000\u0000\u0000\u009a\u009b"+ + "\u0005)\u0000\u0000\u009b\u009c\u0005(\u0000\u0000\u009c\u00a1\u0003\u000e"+ + "\u0007\u0000\u009d\u009e\u0005\"\u0000\u0000\u009e\u00a0\u0003\u000e\u0007"+ + "\u0000\u009f\u009d\u0001\u0000\u0000\u0000\u00a0\u00a3\u0001\u0000\u0000"+ + "\u0000\u00a1\u009f\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000\u0000"+ + "\u0000\u00a2\u00a4\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ + "\u0000\u00a4\u00a5\u00052\u0000\u0000\u00a5\u00ae\u0001\u0000\u0000\u0000"+ + "\u00a6\u00a7\u0003\u000e\u0007\u0000\u00a7\u00a9\u0005*\u0000\u0000\u00a8"+ + "\u00aa\u0005,\u0000\u0000\u00a9\u00a8\u0001\u0000\u0000\u0000\u00a9\u00aa"+ + "\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab\u00ac"+ + "\u0005-\u0000\u0000\u00ac\u00ae\u0001\u0000\u0000\u0000\u00ad\u0091\u0001"+ + "\u0000\u0000\u0000\u00ad\u0094\u0001\u0000\u0000\u0000\u00ad\u0095\u0001"+ + "\u0000\u0000\u0000\u00ad\u0096\u0001\u0000\u0000\u0000\u00ad\u00a6\u0001"+ + "\u0000\u0000\u0000\u00ae\u00b7\u0001\u0000\u0000\u0000\u00af\u00b0\n\u0004"+ + "\u0000\u0000\u00b0\u00b1\u0005\u001f\u0000\u0000\u00b1\u00b6\u0003\n\u0005"+ + 
"\u0005\u00b2\u00b3\n\u0003\u0000\u0000\u00b3\u00b4\u0005/\u0000\u0000"+ + "\u00b4\u00b6\u0003\n\u0005\u0004\u00b5\u00af\u0001\u0000\u0000\u0000\u00b5"+ + "\u00b2\u0001\u0000\u0000\u0000\u00b6\u00b9\u0001\u0000\u0000\u0000\u00b7"+ + "\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8"+ + "\u000b\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba"+ + "\u00bc\u0003\u000e\u0007\u0000\u00bb\u00bd\u0005,\u0000\u0000\u00bc\u00bb"+ + "\u0001\u0000\u0000\u0000\u00bc\u00bd\u0001\u0000\u0000\u0000\u00bd\u00be"+ + "\u0001\u0000\u0000\u0000\u00be\u00bf\u0005+\u0000\u0000\u00bf\u00c0\u0003"+ + "Z-\u0000\u00c0\u00c9\u0001\u0000\u0000\u0000\u00c1\u00c3\u0003\u000e\u0007"+ + "\u0000\u00c2\u00c4\u0005,\u0000\u0000\u00c3\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c4\u00c5\u0001\u0000\u0000\u0000"+ + "\u00c5\u00c6\u00051\u0000\u0000\u00c6\u00c7\u0003Z-\u0000\u00c7\u00c9"+ + "\u0001\u0000\u0000\u0000\u00c8\u00ba\u0001\u0000\u0000\u0000\u00c8\u00c1"+ + "\u0001\u0000\u0000\u0000\u00c9\r\u0001\u0000\u0000\u0000\u00ca\u00d0\u0003"+ + "\u0010\b\u0000\u00cb\u00cc\u0003\u0010\b\u0000\u00cc\u00cd\u0003\\.\u0000"+ + "\u00cd\u00ce\u0003\u0010\b\u0000\u00ce\u00d0\u0001\u0000\u0000\u0000\u00cf"+ + "\u00ca\u0001\u0000\u0000\u0000\u00cf\u00cb\u0001\u0000\u0000\u0000\u00d0"+ + "\u000f\u0001\u0000\u0000\u0000\u00d1\u00d2\u0006\b\uffff\uffff\u0000\u00d2"+ + "\u00d6\u0003\u0012\t\u0000\u00d3\u00d4\u0007\u0000\u0000\u0000\u00d4\u00d6"+ + "\u0003\u0010\b\u0003\u00d5\u00d1\u0001\u0000\u0000\u0000\u00d5\u00d3\u0001"+ + "\u0000\u0000\u0000\u00d6\u00df\u0001\u0000\u0000\u0000\u00d7\u00d8\n\u0002"+ + "\u0000\u0000\u00d8\u00d9\u0007\u0001\u0000\u0000\u00d9\u00de\u0003\u0010"+ + "\b\u0003\u00da\u00db\n\u0001\u0000\u0000\u00db\u00dc\u0007\u0000\u0000"+ + "\u0000\u00dc\u00de\u0003\u0010\b\u0002\u00dd\u00d7\u0001\u0000\u0000\u0000"+ + "\u00dd\u00da\u0001\u0000\u0000\u0000\u00de\u00e1\u0001\u0000\u0000\u0000"+ + 
"\u00df\u00dd\u0001\u0000\u0000\u0000\u00df\u00e0\u0001\u0000\u0000\u0000"+ + "\u00e0\u0011\u0001\u0000\u0000\u0000\u00e1\u00df\u0001\u0000\u0000\u0000"+ + "\u00e2\u00ea\u00038\u001c\u0000\u00e3\u00ea\u00030\u0018\u0000\u00e4\u00ea"+ + "\u0003\u0014\n\u0000\u00e5\u00e6\u0005(\u0000\u0000\u00e6\u00e7\u0003"+ + "\n\u0005\u0000\u00e7\u00e8\u00052\u0000\u0000\u00e8\u00ea\u0001\u0000"+ + "\u0000\u0000\u00e9\u00e2\u0001\u0000\u0000\u0000\u00e9\u00e3\u0001\u0000"+ + "\u0000\u0000\u00e9\u00e4\u0001\u0000\u0000\u0000\u00e9\u00e5\u0001\u0000"+ + "\u0000\u0000\u00ea\u0013\u0001\u0000\u0000\u0000\u00eb\u00ec\u00034\u001a"+ + "\u0000\u00ec\u00f6\u0005(\u0000\u0000\u00ed\u00f7\u0005=\u0000\u0000\u00ee"+ + "\u00f3\u0003\n\u0005\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0\u00f2"+ + "\u0003\n\u0005\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000\u00f2\u00f5\u0001"+ + "\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3\u00f4\u0001"+ + "\u0000\u0000\u0000\u00f4\u00f7\u0001\u0000\u0000\u0000\u00f5\u00f3\u0001"+ + "\u0000\u0000\u0000\u00f6\u00ed\u0001\u0000\u0000\u0000\u00f6\u00ee\u0001"+ + "\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000\u00f7\u00f8\u0001"+ + "\u0000\u0000\u0000\u00f8\u00f9\u00052\u0000\u0000\u00f9\u0015\u0001\u0000"+ + "\u0000\u0000\u00fa\u00fb\u0005\u000e\u0000\u0000\u00fb\u00fc\u0003\u0018"+ + "\f\u0000\u00fc\u0017\u0001\u0000\u0000\u0000\u00fd\u0102\u0003\u001a\r"+ + "\u0000\u00fe\u00ff\u0005\"\u0000\u0000\u00ff\u0101\u0003\u001a\r\u0000"+ + "\u0100\u00fe\u0001\u0000\u0000\u0000\u0101\u0104\u0001\u0000\u0000\u0000"+ + "\u0102\u0100\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000\u0000"+ + "\u0103\u0019\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000\u0000\u0000"+ + "\u0105\u010b\u0003\n\u0005\u0000\u0106\u0107\u00030\u0018\u0000\u0107"+ + "\u0108\u0005!\u0000\u0000\u0108\u0109\u0003\n\u0005\u0000\u0109\u010b"+ + "\u0001\u0000\u0000\u0000\u010a\u0105\u0001\u0000\u0000\u0000\u010a\u0106"+ + 
"\u0001\u0000\u0000\u0000\u010b\u001b\u0001\u0000\u0000\u0000\u010c\u010d"+ + "\u0005\u0006\u0000\u0000\u010d\u0112\u0003\u001e\u000f\u0000\u010e\u010f"+ + "\u0005\"\u0000\u0000\u010f\u0111\u0003\u001e\u000f\u0000\u0110\u010e\u0001"+ + "\u0000\u0000\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001"+ + "\u0000\u0000\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u0116\u0001"+ + "\u0000\u0000\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0117\u0003"+ + " \u0010\u0000\u0116\u0115\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ + "\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u011a\u0003$\u0012"+ + "\u0000\u0119\u0118\u0001\u0000\u0000\u0000\u0119\u011a\u0001\u0000\u0000"+ + "\u0000\u011a\u001d\u0001\u0000\u0000\u0000\u011b\u011c\u0007\u0002\u0000"+ + "\u0000\u011c\u001f\u0001\u0000\u0000\u0000\u011d\u011e\u0005G\u0000\u0000"+ + "\u011e\u0123\u0003\"\u0011\u0000\u011f\u0120\u0005\"\u0000\u0000\u0120"+ + "\u0122\u0003\"\u0011\u0000\u0121\u011f\u0001\u0000\u0000\u0000\u0122\u0125"+ + "\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0123\u0124"+ + "\u0001\u0000\u0000\u0000\u0124!\u0001\u0000\u0000\u0000\u0125\u0123\u0001"+ + "\u0000\u0000\u0000\u0126\u0127\u0003Z-\u0000\u0127\u0128\u0005!\u0000"+ + "\u0000\u0128\u0129\u0003Z-\u0000\u0129#\u0001\u0000\u0000\u0000\u012a"+ + "\u012d\u0003&\u0013\u0000\u012b\u012d\u0003(\u0014\u0000\u012c\u012a\u0001"+ + "\u0000\u0000\u0000\u012c\u012b\u0001\u0000\u0000\u0000\u012d%\u0001\u0000"+ + "\u0000\u0000\u012e\u012f\u0005H\u0000\u0000\u012f\u0134\u0003\u001e\u000f"+ + "\u0000\u0130\u0131\u0005\"\u0000\u0000\u0131\u0133\u0003\u001e\u000f\u0000"+ + "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0136\u0001\u0000\u0000\u0000"+ "\u0134\u0132\u0001\u0000\u0000\u0000\u0134\u0135\u0001\u0000\u0000\u0000"+ - "\u0135)\u0001\u0000\u0000\u0000\u0136\u0137\u0007\u0002\u0000\u0000\u0137"+ - "+\u0001\u0000\u0000\u0000\u0138\u013d\u00030\u0018\u0000\u0139\u013a\u0005"+ - 
"$\u0000\u0000\u013a\u013c\u00030\u0018\u0000\u013b\u0139\u0001\u0000\u0000"+ - "\u0000\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000"+ - "\u0000\u013d\u013e\u0001\u0000\u0000\u0000\u013e-\u0001\u0000\u0000\u0000"+ - "\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0145\u00032\u0019\u0000\u0141"+ - "\u0142\u0005$\u0000\u0000\u0142\u0144\u00032\u0019\u0000\u0143\u0141\u0001"+ - "\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143\u0001"+ - "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146/\u0001\u0000"+ - "\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u0149\u0007\u0003"+ - "\u0000\u0000\u01491\u0001\u0000\u0000\u0000\u014a\u014b\u0005L\u0000\u0000"+ - "\u014b3\u0001\u0000\u0000\u0000\u014c\u0177\u0005-\u0000\u0000\u014d\u014e"+ - "\u0003T*\u0000\u014e\u014f\u0005B\u0000\u0000\u014f\u0177\u0001\u0000"+ - "\u0000\u0000\u0150\u0177\u0003R)\u0000\u0151\u0177\u0003T*\u0000\u0152"+ - "\u0177\u0003N\'\u0000\u0153\u0177\u00050\u0000\u0000\u0154\u0177\u0003"+ - "V+\u0000\u0155\u0156\u0005@\u0000\u0000\u0156\u015b\u0003P(\u0000\u0157"+ - "\u0158\u0005\"\u0000\u0000\u0158\u015a\u0003P(\u0000\u0159\u0157\u0001"+ - "\u0000\u0000\u0000\u015a\u015d\u0001\u0000\u0000\u0000\u015b\u0159\u0001"+ - "\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000\u0000\u015c\u015e\u0001"+ - "\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u015f\u0005"+ - "A\u0000\u0000\u015f\u0177\u0001\u0000\u0000\u0000\u0160\u0161\u0005@\u0000"+ - "\u0000\u0161\u0166\u0003N\'\u0000\u0162\u0163\u0005\"\u0000\u0000\u0163"+ - "\u0165\u0003N\'\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0165\u0168"+ - "\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0166\u0167"+ - "\u0001\u0000\u0000\u0000\u0167\u0169\u0001\u0000\u0000\u0000\u0168\u0166"+ - "\u0001\u0000\u0000\u0000\u0169\u016a\u0005A\u0000\u0000\u016a\u0177\u0001"+ - "\u0000\u0000\u0000\u016b\u016c\u0005@\u0000\u0000\u016c\u0171\u0003V+"+ - 
"\u0000\u016d\u016e\u0005\"\u0000\u0000\u016e\u0170\u0003V+\u0000\u016f"+ - "\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001\u0000\u0000\u0000\u0171"+ - "\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000\u0000\u0172"+ - "\u0174\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000\u0000\u0000\u0174"+ - "\u0175\u0005A\u0000\u0000\u0175\u0177\u0001\u0000\u0000\u0000\u0176\u014c"+ - "\u0001\u0000\u0000\u0000\u0176\u014d\u0001\u0000\u0000\u0000\u0176\u0150"+ - "\u0001\u0000\u0000\u0000\u0176\u0151\u0001\u0000\u0000\u0000\u0176\u0152"+ - "\u0001\u0000\u0000\u0000\u0176\u0153\u0001\u0000\u0000\u0000\u0176\u0154"+ - "\u0001\u0000\u0000\u0000\u0176\u0155\u0001\u0000\u0000\u0000\u0176\u0160"+ - "\u0001\u0000\u0000\u0000\u0176\u016b\u0001\u0000\u0000\u0000\u01775\u0001"+ - "\u0000\u0000\u0000\u0178\u0179\u0005\n\u0000\u0000\u0179\u017a\u0005\u001c"+ - "\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b\u017c\u0005\u0010\u0000"+ - "\u0000\u017c\u0181\u0003:\u001d\u0000\u017d\u017e\u0005\"\u0000\u0000"+ - "\u017e\u0180\u0003:\u001d\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180"+ - "\u0183\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0181"+ - "\u0182\u0001\u0000\u0000\u0000\u01829\u0001\u0000\u0000\u0000\u0183\u0181"+ - "\u0001\u0000\u0000\u0000\u0184\u0186\u0003\n\u0005\u0000\u0185\u0187\u0007"+ - "\u0004\u0000\u0000\u0186\u0185\u0001\u0000\u0000\u0000\u0186\u0187\u0001"+ - "\u0000\u0000\u0000\u0187\u018a\u0001\u0000\u0000\u0000\u0188\u0189\u0005"+ - ".\u0000\u0000\u0189\u018b\u0007\u0005\u0000\u0000\u018a\u0188\u0001\u0000"+ - "\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001\u0000\u0000"+ - "\u0000\u018c\u018d\u0005\t\u0000\u0000\u018d\u0192\u0003.\u0017\u0000"+ - "\u018e\u018f\u0005\"\u0000\u0000\u018f\u0191\u0003.\u0017\u0000\u0190"+ - "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ - "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ - 
"=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196"+ - "\u0005\u0002\u0000\u0000\u0196\u019b\u0003.\u0017\u0000\u0197\u0198\u0005"+ - "\"\u0000\u0000\u0198\u019a\u0003.\u0017\u0000\u0199\u0197\u0001\u0000"+ - "\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000"+ - "\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000\u019c?\u0001\u0000\u0000"+ - "\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e\u019f\u0005\r\u0000\u0000"+ - "\u019f\u01a4\u0003B!\u0000\u01a0\u01a1\u0005\"\u0000\u0000\u01a1\u01a3"+ - "\u0003B!\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a3\u01a6\u0001\u0000"+ - "\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000\u01a4\u01a5\u0001\u0000"+ - "\u0000\u0000\u01a5A\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a7\u01a8\u0003.\u0017\u0000\u01a8\u01a9\u0005P\u0000\u0000\u01a9"+ - "\u01aa\u0003.\u0017\u0000\u01aaC\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005"+ - "\u0001\u0000\u0000\u01ac\u01ad\u0003\u0012\t\u0000\u01ad\u01af\u0003V"+ - "+\u0000\u01ae\u01b0\u0003J%\u0000\u01af\u01ae\u0001\u0000\u0000\u0000"+ - "\u01af\u01b0\u0001\u0000\u0000\u0000\u01b0E\u0001\u0000\u0000\u0000\u01b1"+ - "\u01b2\u0005\u0007\u0000\u0000\u01b2\u01b3\u0003\u0012\t\u0000\u01b3\u01b4"+ - "\u0003V+\u0000\u01b4G\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005\f\u0000"+ - "\u0000\u01b6\u01b7\u0003,\u0016\u0000\u01b7I\u0001\u0000\u0000\u0000\u01b8"+ - "\u01bd\u0003L&\u0000\u01b9\u01ba\u0005\"\u0000\u0000\u01ba\u01bc\u0003"+ - "L&\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001\u0000\u0000"+ - "\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001\u0000\u0000"+ - "\u0000\u01beK\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000"+ - "\u01c0\u01c1\u00030\u0018\u0000\u01c1\u01c2\u0005!\u0000\u0000\u01c2\u01c3"+ - "\u00034\u001a\u0000\u01c3M\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u0006"+ - "\u0000\u0000\u01c5O\u0001\u0000\u0000\u0000\u01c6\u01c9\u0003R)\u0000"+ - 
"\u01c7\u01c9\u0003T*\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c8\u01c7"+ - "\u0001\u0000\u0000\u0000\u01c9Q\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007"+ - "\u0000\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001"+ - "\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ - "\u001d\u0000\u0000\u01ceS\u0001\u0000\u0000\u0000\u01cf\u01d1\u0007\u0000"+ - "\u0000\u0000\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000"+ - "\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u001c"+ - "\u0000\u0000\u01d3U\u0001\u0000\u0000\u0000\u01d4\u01d5\u0005\u001b\u0000"+ - "\u0000\u01d5W\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0007\u0000\u0000"+ - "\u01d7Y\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005\u0005\u0000\u0000\u01d9"+ - "\u01da\u0003\\.\u0000\u01da[\u0001\u0000\u0000\u0000\u01db\u01dc\u0005"+ - "@\u0000\u0000\u01dc\u01dd\u0003\u0002\u0001\u0000\u01dd\u01de\u0005A\u0000"+ - "\u0000\u01de]\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u000f\u0000\u0000"+ - "\u01e0\u01e1\u0005`\u0000\u0000\u01e1_\u0001\u0000\u0000\u0000\u01e2\u01e3"+ - "\u0005\u000b\u0000\u0000\u01e3\u01e4\u0005d\u0000\u0000\u01e4a\u0001\u0000"+ - "\u0000\u0000\u01e5\u01e6\u0005\u0003\u0000\u0000\u01e6\u01e9\u0005V\u0000"+ - "\u0000\u01e7\u01e8\u0005T\u0000\u0000\u01e8\u01ea\u0003.\u0017\u0000\u01e9"+ - "\u01e7\u0001\u0000\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea"+ - "\u01f4\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005U\u0000\u0000\u01ec\u01f1"+ - "\u0003d2\u0000\u01ed\u01ee\u0005\"\u0000\u0000\u01ee\u01f0\u0003d2\u0000"+ - "\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000"+ - "\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ - "\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000"+ - "\u01f4\u01eb\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000"+ - "\u01f5c\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003.\u0017\u0000\u01f7\u01f8"+ - 
"\u0005!\u0000\u0000\u01f8\u01fa\u0001\u0000\u0000\u0000\u01f9\u01f6\u0001"+ - "\u0000\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001"+ - "\u0000\u0000\u0000\u01fb\u01fc\u0003.\u0017\u0000\u01fce\u0001\u0000\u0000"+ - "\u00001qy\u0088\u0094\u009d\u00a5\u00a9\u00b1\u00b3\u00b8\u00bf\u00c4"+ - "\u00cb\u00d1\u00d9\u00db\u00e5\u00ef\u00f2\u00fe\u0106\u010e\u0112\u0116"+ - "\u011e\u012a\u012e\u0134\u013d\u0145\u015b\u0166\u0171\u0176\u0181\u0186"+ - "\u018a\u0192\u019b\u01a4\u01af\u01bd\u01c8\u01cb\u01d0\u01e9\u01f1\u01f4"+ - "\u01f9"; + "\u0135\'\u0001\u0000\u0000\u0000\u0136\u0134\u0001\u0000\u0000\u0000\u0137"+ + "\u0138\u0005@\u0000\u0000\u0138\u0139\u0003&\u0013\u0000\u0139\u013a\u0005"+ + "A\u0000\u0000\u013a)\u0001\u0000\u0000\u0000\u013b\u013c\u0005\u0004\u0000"+ + "\u0000\u013c\u013d\u0003\u0018\f\u0000\u013d+\u0001\u0000\u0000\u0000"+ + "\u013e\u0140\u0005\u0011\u0000\u0000\u013f\u0141\u0003\u0018\f\u0000\u0140"+ + "\u013f\u0001\u0000\u0000\u0000\u0140\u0141\u0001\u0000\u0000\u0000\u0141"+ + "\u0144\u0001\u0000\u0000\u0000\u0142\u0143\u0005\u001e\u0000\u0000\u0143"+ + "\u0145\u0003\u0018\f\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145"+ + "\u0001\u0000\u0000\u0000\u0145-\u0001\u0000\u0000\u0000\u0146\u0147\u0005"+ + "\b\u0000\u0000\u0147\u014a\u0003\u0018\f\u0000\u0148\u0149\u0005\u001e"+ + "\u0000\u0000\u0149\u014b\u0003\u0018\f\u0000\u014a\u0148\u0001\u0000\u0000"+ + "\u0000\u014a\u014b\u0001\u0000\u0000\u0000\u014b/\u0001\u0000\u0000\u0000"+ + "\u014c\u0151\u00034\u001a\u0000\u014d\u014e\u0005$\u0000\u0000\u014e\u0150"+ + "\u00034\u001a\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001"+ + "\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001"+ + "\u0000\u0000\u0000\u01521\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ + "\u0000\u0000\u0154\u0159\u00036\u001b\u0000\u0155\u0156\u0005$\u0000\u0000"+ + "\u0156\u0158\u00036\u001b\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158"+ + 
"\u015b\u0001\u0000\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159"+ + "\u015a\u0001\u0000\u0000\u0000\u015a3\u0001\u0000\u0000\u0000\u015b\u0159"+ + "\u0001\u0000\u0000\u0000\u015c\u015d\u0007\u0003\u0000\u0000\u015d5\u0001"+ + "\u0000\u0000\u0000\u015e\u015f\u0005M\u0000\u0000\u015f7\u0001\u0000\u0000"+ + "\u0000\u0160\u018b\u0005-\u0000\u0000\u0161\u0162\u0003X,\u0000\u0162"+ + "\u0163\u0005B\u0000\u0000\u0163\u018b\u0001\u0000\u0000\u0000\u0164\u018b"+ + "\u0003V+\u0000\u0165\u018b\u0003X,\u0000\u0166\u018b\u0003R)\u0000\u0167"+ + "\u018b\u00050\u0000\u0000\u0168\u018b\u0003Z-\u0000\u0169\u016a\u0005"+ + "@\u0000\u0000\u016a\u016f\u0003T*\u0000\u016b\u016c\u0005\"\u0000\u0000"+ + "\u016c\u016e\u0003T*\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016e\u0171"+ + "\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u016f\u0170"+ + "\u0001\u0000\u0000\u0000\u0170\u0172\u0001\u0000\u0000\u0000\u0171\u016f"+ + "\u0001\u0000\u0000\u0000\u0172\u0173\u0005A\u0000\u0000\u0173\u018b\u0001"+ + "\u0000\u0000\u0000\u0174\u0175\u0005@\u0000\u0000\u0175\u017a\u0003R)"+ + "\u0000\u0176\u0177\u0005\"\u0000\u0000\u0177\u0179\u0003R)\u0000\u0178"+ + "\u0176\u0001\u0000\u0000\u0000\u0179\u017c\u0001\u0000\u0000\u0000\u017a"+ + "\u0178\u0001\u0000\u0000\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b"+ + "\u017d\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d"+ + "\u017e\u0005A\u0000\u0000\u017e\u018b\u0001\u0000\u0000\u0000\u017f\u0180"+ + "\u0005@\u0000\u0000\u0180\u0185\u0003Z-\u0000\u0181\u0182\u0005\"\u0000"+ + "\u0000\u0182\u0184\u0003Z-\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0184"+ + "\u0187\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0185"+ + "\u0186\u0001\u0000\u0000\u0000\u0186\u0188\u0001\u0000\u0000\u0000\u0187"+ + "\u0185\u0001\u0000\u0000\u0000\u0188\u0189\u0005A\u0000\u0000\u0189\u018b"+ + "\u0001\u0000\u0000\u0000\u018a\u0160\u0001\u0000\u0000\u0000\u018a\u0161"+ + 
"\u0001\u0000\u0000\u0000\u018a\u0164\u0001\u0000\u0000\u0000\u018a\u0165"+ + "\u0001\u0000\u0000\u0000\u018a\u0166\u0001\u0000\u0000\u0000\u018a\u0167"+ + "\u0001\u0000\u0000\u0000\u018a\u0168\u0001\u0000\u0000\u0000\u018a\u0169"+ + "\u0001\u0000\u0000\u0000\u018a\u0174\u0001\u0000\u0000\u0000\u018a\u017f"+ + "\u0001\u0000\u0000\u0000\u018b9\u0001\u0000\u0000\u0000\u018c\u018d\u0005"+ + "\n\u0000\u0000\u018d\u018e\u0005\u001c\u0000\u0000\u018e;\u0001\u0000"+ + "\u0000\u0000\u018f\u0190\u0005\u0010\u0000\u0000\u0190\u0195\u0003>\u001f"+ + "\u0000\u0191\u0192\u0005\"\u0000\u0000\u0192\u0194\u0003>\u001f\u0000"+ + "\u0193\u0191\u0001\u0000\u0000\u0000\u0194\u0197\u0001\u0000\u0000\u0000"+ + "\u0195\u0193\u0001\u0000\u0000\u0000\u0195\u0196\u0001\u0000\u0000\u0000"+ + "\u0196=\u0001\u0000\u0000\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198"+ + "\u019a\u0003\n\u0005\u0000\u0199\u019b\u0007\u0004\u0000\u0000\u019a\u0199"+ + "\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000\u0000\u0000\u019b\u019e"+ + "\u0001\u0000\u0000\u0000\u019c\u019d\u0005.\u0000\u0000\u019d\u019f\u0007"+ + "\u0005\u0000\u0000\u019e\u019c\u0001\u0000\u0000\u0000\u019e\u019f\u0001"+ + "\u0000\u0000\u0000\u019f?\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005\t"+ + "\u0000\u0000\u01a1\u01a6\u00032\u0019\u0000\u01a2\u01a3\u0005\"\u0000"+ + "\u0000\u01a3\u01a5\u00032\u0019\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ + "\u01a5\u01a8\u0001\u0000\u0000\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000"+ + "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8"+ + "\u01a6\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005\u0002\u0000\u0000\u01aa"+ + "\u01af\u00032\u0019\u0000\u01ab\u01ac\u0005\"\u0000\u0000\u01ac\u01ae"+ + "\u00032\u0019\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1\u0001"+ + "\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0\u0001"+ + "\u0000\u0000\u0000\u01b0C\u0001\u0000\u0000\u0000\u01b1\u01af\u0001\u0000"+ + 
"\u0000\u0000\u01b2\u01b3\u0005\r\u0000\u0000\u01b3\u01b8\u0003F#\u0000"+ + "\u01b4\u01b5\u0005\"\u0000\u0000\u01b5\u01b7\u0003F#\u0000\u01b6\u01b4"+ + "\u0001\u0000\u0000\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6"+ + "\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9E\u0001"+ + "\u0000\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0003"+ + "2\u0019\u0000\u01bc\u01bd\u0005Q\u0000\u0000\u01bd\u01be\u00032\u0019"+ + "\u0000\u01beG\u0001\u0000\u0000\u0000\u01bf\u01c0\u0005\u0001\u0000\u0000"+ + "\u01c0\u01c1\u0003\u0012\t\u0000\u01c1\u01c3\u0003Z-\u0000\u01c2\u01c4"+ + "\u0003N\'\u0000\u01c3\u01c2\u0001\u0000\u0000\u0000\u01c3\u01c4\u0001"+ + "\u0000\u0000\u0000\u01c4I\u0001\u0000\u0000\u0000\u01c5\u01c6\u0005\u0007"+ + "\u0000\u0000\u01c6\u01c7\u0003\u0012\t\u0000\u01c7\u01c8\u0003Z-\u0000"+ + "\u01c8K\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\f\u0000\u0000\u01ca"+ + "\u01cb\u00030\u0018\u0000\u01cbM\u0001\u0000\u0000\u0000\u01cc\u01d1\u0003"+ + "P(\u0000\u01cd\u01ce\u0005\"\u0000\u0000\u01ce\u01d0\u0003P(\u0000\u01cf"+ + "\u01cd\u0001\u0000\u0000\u0000\u01d0\u01d3\u0001\u0000\u0000\u0000\u01d1"+ + "\u01cf\u0001\u0000\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2"+ + "O\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d4\u01d5"+ + "\u00034\u001a\u0000\u01d5\u01d6\u0005!\u0000\u0000\u01d6\u01d7\u00038"+ + "\u001c\u0000\u01d7Q\u0001\u0000\u0000\u0000\u01d8\u01d9\u0007\u0006\u0000"+ + "\u0000\u01d9S\u0001\u0000\u0000\u0000\u01da\u01dd\u0003V+\u0000\u01db"+ + "\u01dd\u0003X,\u0000\u01dc\u01da\u0001\u0000\u0000\u0000\u01dc\u01db\u0001"+ + "\u0000\u0000\u0000\u01ddU\u0001\u0000\u0000\u0000\u01de\u01e0\u0007\u0000"+ + "\u0000\u0000\u01df\u01de\u0001\u0000\u0000\u0000\u01df\u01e0\u0001\u0000"+ + "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u001d"+ + "\u0000\u0000\u01e2W\u0001\u0000\u0000\u0000\u01e3\u01e5\u0007\u0000\u0000"+ + 
"\u0000\u01e4\u01e3\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000"+ + "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005\u001c\u0000"+ + "\u0000\u01e7Y\u0001\u0000\u0000\u0000\u01e8\u01e9\u0005\u001b\u0000\u0000"+ + "\u01e9[\u0001\u0000\u0000\u0000\u01ea\u01eb\u0007\u0007\u0000\u0000\u01eb"+ + "]\u0001\u0000\u0000\u0000\u01ec\u01ed\u0005\u0005\u0000\u0000\u01ed\u01ee"+ + "\u0003`0\u0000\u01ee_\u0001\u0000\u0000\u0000\u01ef\u01f0\u0005@\u0000"+ + "\u0000\u01f0\u01f1\u0003\u0002\u0001\u0000\u01f1\u01f2\u0005A\u0000\u0000"+ + "\u01f2a\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000f\u0000\u0000\u01f4"+ + "\u01f5\u0005a\u0000\u0000\u01f5c\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005"+ + "\u000b\u0000\u0000\u01f7\u01f8\u0005e\u0000\u0000\u01f8e\u0001\u0000\u0000"+ + "\u0000\u01f9\u01fa\u0005\u0003\u0000\u0000\u01fa\u01fd\u0005W\u0000\u0000"+ + "\u01fb\u01fc\u0005U\u0000\u0000\u01fc\u01fe\u00032\u0019\u0000\u01fd\u01fb"+ + "\u0001\u0000\u0000\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u0208"+ + "\u0001\u0000\u0000\u0000\u01ff\u0200\u0005V\u0000\u0000\u0200\u0205\u0003"+ + "h4\u0000\u0201\u0202\u0005\"\u0000\u0000\u0202\u0204\u0003h4\u0000\u0203"+ + "\u0201\u0001\u0000\u0000\u0000\u0204\u0207\u0001\u0000\u0000\u0000\u0205"+ + "\u0203\u0001\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206"+ + "\u0209\u0001\u0000\u0000\u0000\u0207\u0205\u0001\u0000\u0000\u0000\u0208"+ + "\u01ff\u0001\u0000\u0000\u0000\u0208\u0209\u0001\u0000\u0000\u0000\u0209"+ + "g\u0001\u0000\u0000\u0000\u020a\u020b\u00032\u0019\u0000\u020b\u020c\u0005"+ + "!\u0000\u0000\u020c\u020e\u0001\u0000\u0000\u0000\u020d\u020a\u0001\u0000"+ + "\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000"+ + "\u0000\u0000\u020f\u0210\u00032\u0019\u0000\u0210i\u0001\u0000\u0000\u0000"+ + "3u}\u008c\u0098\u00a1\u00a9\u00ad\u00b5\u00b7\u00bc\u00c3\u00c8\u00cf"+ + "\u00d5\u00dd\u00df\u00e9\u00f3\u00f6\u0102\u010a\u0112\u0116\u0119\u0123"+ + 
"\u012c\u0134\u0140\u0144\u014a\u0151\u0159\u016f\u017a\u0185\u018a\u0195"+ + "\u019a\u019e\u01a6\u01af\u01b8\u01c3\u01d1\u01dc\u01df\u01e4\u01fd\u0205"+ + "\u0208\u020d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 6737e782025b2..2d5954517d717 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -336,6 +336,42 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitFromCommand(EsqlBaseParser.FromCommandContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterFromOptions(EsqlBaseParser.FromOptionsContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitFromOptions(EsqlBaseParser.FromOptionsContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterConfigOption(EsqlBaseParser.ConfigOptionContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitConfigOption(EsqlBaseParser.ConfigOptionContext ctx) { } /** * {@inheritDoc} * @@ -408,18 +444,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 95502053521d6..9bbc672e4d51f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -207,49 +207,63 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitMetadata(EsqlBaseParser.MetadataContext ctx) { return visitChildren(ctx); } + @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx) { return visitChildren(ctx); } + @Override public T visitFromOptions(EsqlBaseParser.FromOptionsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } + @Override public T visitConfigOption(EsqlBaseParser.ConfigOptionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { return visitChildren(ctx); } + @Override public T visitMetadata(EsqlBaseParser.MetadataContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { return visitChildren(ctx); } + @Override public T visitMetadataOption(EsqlBaseParser.MetadataOptionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } + @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 433eba1a14999..c80b7e5dd878e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -311,6 +311,36 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFromCommand(EsqlBaseParser.FromCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * @param ctx the parse tree + */ + void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * @param ctx the parse tree + */ + void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#fromOptions}. + * @param ctx the parse tree + */ + void enterFromOptions(EsqlBaseParser.FromOptionsContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#fromOptions}. + * @param ctx the parse tree + */ + void exitFromOptions(EsqlBaseParser.FromOptionsContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#configOption}. + * @param ctx the parse tree + */ + void enterConfigOption(EsqlBaseParser.ConfigOptionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#configOption}. + * @param ctx the parse tree + */ + void exitConfigOption(EsqlBaseParser.ConfigOptionContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#metadata}. 
* @param ctx the parse tree @@ -371,16 +401,6 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. - * @param ctx the parse tree - */ - void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); - /** - * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. - * @param ctx the parse tree - */ - void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#qualifiedName}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 323eb46a42fda..09da2cb9c3ddb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -189,6 +189,24 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitFromCommand(EsqlBaseParser.FromCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#fromOptions}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFromOptions(EsqlBaseParser.FromOptionsContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#configOption}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitConfigOption(EsqlBaseParser.ConfigOptionContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#metadata}. * @param ctx the parse tree @@ -225,12 +243,6 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); - /** - * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#qualifiedName}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 64ce1633e8772..b942ccbfb8872 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.parser.ParserUtils; import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; @@ -210,7 +211,21 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { } } } - return new EsqlUnresolvedRelation(source, table, Arrays.asList(metadataMap.values().toArray(Attribute[]::new))); + EsSourceOptions esSourceOptions = new EsSourceOptions(); + if (ctx.fromOptions() != null) { + for (var o : ctx.fromOptions().configOption()) { + var nameContext = 
o.string().get(0); + String name = visitString(nameContext).fold().toString(); + String value = visitString(o.string().get(1)).fold().toString(); + try { + esSourceOptions.addOption(name, value); + } catch (IllegalArgumentException iae) { + var cause = iae.getCause() != null ? ". " + iae.getCause().getMessage() : ""; + throw new ParsingException(iae, source(nameContext), "invalid options provided: " + iae.getMessage() + cause); + } + } + } + return new EsqlUnresolvedRelation(source, table, Arrays.asList(metadataMap.values().toArray(Attribute[]::new)), esSourceOptions); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlUnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlUnresolvedRelation.java index 718c4e0049c9b..01c29cbce123a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlUnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsqlUnresolvedRelation.java @@ -8,32 +8,55 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.UnresolvedRelation; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import java.util.Objects; public class EsqlUnresolvedRelation extends UnresolvedRelation { private final List metadataFields; - - public EsqlUnresolvedRelation(Source source, TableIdentifier table, List metadataFields, String unresolvedMessage) { + private final EsSourceOptions esSourceOptions; + + public EsqlUnresolvedRelation( + Source source, + TableIdentifier table, + List metadataFields, + EsSourceOptions esSourceOptions, + String unresolvedMessage + ) { super(source, table, "", false, 
unresolvedMessage); this.metadataFields = metadataFields; + Objects.requireNonNull(esSourceOptions); + this.esSourceOptions = esSourceOptions; + } + + public EsqlUnresolvedRelation(Source source, TableIdentifier table, List metadataFields, String unresolvedMessage) { + this(source, table, metadataFields, EsSourceOptions.NO_OPTIONS, unresolvedMessage); + } + + public EsqlUnresolvedRelation(Source source, TableIdentifier table, List metadataFields, EsSourceOptions esSourceOptions) { + this(source, table, metadataFields, esSourceOptions, null); } public EsqlUnresolvedRelation(Source source, TableIdentifier table, List metadataFields) { - this(source, table, metadataFields, null); + this(source, table, metadataFields, EsSourceOptions.NO_OPTIONS, null); } public List metadataFields() { return metadataFields; } + public EsSourceOptions esSourceOptions() { + return esSourceOptions; + } + @Override protected NodeInfo info() { - return NodeInfo.create(this, EsqlUnresolvedRelation::new, table(), metadataFields(), unresolvedMessage()); + return NodeInfo.create(this, EsqlUnresolvedRelation::new, table(), metadataFields(), esSourceOptions(), unresolvedMessage()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index ffc7195fe0a23..f8fd284bbd558 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; import 
org.elasticsearch.xpack.ql.tree.Source; @@ -182,6 +183,12 @@ static QueryBuilder detectFilter(PhysicalPlan plan, String fieldName, Predicate< return Queries.combine(FILTER, asList(requestFilter)); } + public static EsSourceOptions esSourceOptions(PhysicalPlan plan) { + Holder holder = new Holder<>(); + plan.forEachUp(FragmentExec.class, f -> f.fragment().forEachUp(EsRelation.class, r -> holder.set(r.esSourceOptions()))); + return holder.get(); + } + /** * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. * This specifically excludes spatial data types, which are not themselves sortable. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 90cbc018b77dc..8fae3c09f32d5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -70,6 +70,7 @@ import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import java.util.ArrayList; import java.util.Collections; @@ -293,35 +294,51 @@ private void startComputeOnDataNodes( // Since it's used only for @timestamp, it is relatively safe to assume it's not needed // but it would be better to have a proper impl. 
QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan, x -> true); - lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodes -> { - try (RefCountingRunnable refs = new RefCountingRunnable(() -> parentListener.onResponse(null))) { - // For each target node, first open a remote exchange on the remote node, then link the exchange source to - // the new remote exchange sink, and initialize the computation on the target node via data-node-request. - for (DataNode node : dataNodes) { - var dataNodeListener = ActionListener.releaseAfter(dataNodeListenerSupplier.get(), refs.acquire()); - var queryPragmas = configuration.pragmas(); - ExchangeService.openExchange( - transportService, - node.connection, - sessionId, - queryPragmas.exchangeBufferSize(), - esqlExecutor, - dataNodeListener.delegateFailureAndWrap((delegate, unused) -> { - var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); - exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); - transportService.sendChildRequest( - node.connection, - DATA_ACTION_NAME, - new DataNodeRequest(sessionId, configuration, clusterAlias, node.shardIds, node.aliasFilters, dataNodePlan), - parentTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(delegate, ComputeResponse::new, esqlExecutor) - ); - }) - ); + EsSourceOptions esSourceOptions = PlannerUtils.esSourceOptions(dataNodePlan); + lookupDataNodes( + parentTask, + clusterAlias, + requestFilter, + concreteIndices, + originalIndices, + esSourceOptions, + ActionListener.wrap(dataNodes -> { + try (RefCountingRunnable refs = new RefCountingRunnable(() -> parentListener.onResponse(null))) { + // For each target node, first open a remote exchange on the remote node, then link the exchange source to + // the new remote exchange sink, and initialize the computation on the target node via data-node-request. 
+ for (DataNode node : dataNodes) { + var dataNodeListener = ActionListener.releaseAfter(dataNodeListenerSupplier.get(), refs.acquire()); + var queryPragmas = configuration.pragmas(); + ExchangeService.openExchange( + transportService, + node.connection, + sessionId, + queryPragmas.exchangeBufferSize(), + esqlExecutor, + dataNodeListener.delegateFailureAndWrap((delegate, unused) -> { + var remoteSink = exchangeService.newRemoteSink(parentTask, sessionId, transportService, node.connection); + exchangeSource.addRemoteSink(remoteSink, queryPragmas.concurrentExchangeClients()); + transportService.sendChildRequest( + node.connection, + DATA_ACTION_NAME, + new DataNodeRequest( + sessionId, + configuration, + clusterAlias, + node.shardIds, + node.aliasFilters, + dataNodePlan + ), + parentTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(delegate, ComputeResponse::new, esqlExecutor) + ); + }) + ); + } } - } - }, parentListener::onFailure)); + }, parentListener::onFailure) + ); } private void startComputeOnRemoteClusters( @@ -518,12 +535,13 @@ record RemoteCluster(String clusterAlias, Transport.Connection connection, Strin * Ideally, the search_shards API should be called before the field-caps API; however, this can lead * to a situation where the column structure (i.e., matched data types) differs depending on the query. 
*/ - void lookupDataNodes( + private void lookupDataNodes( Task parentTask, String clusterAlias, QueryBuilder filter, Set concreteIndices, String[] originalIndices, + EsSourceOptions esSourceOptions, ActionListener> listener ) { ThreadContext threadContext = transportService.getThreadPool().getThreadContext(); @@ -567,10 +585,10 @@ void lookupDataNodes( threadContext.markAsSystemContext(); SearchShardsRequest searchShardsRequest = new SearchShardsRequest( originalIndices, - SearchRequest.DEFAULT_INDICES_OPTIONS, + esSourceOptions.indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS), filter, null, - null, + esSourceOptions.preference(), false, clusterAlias ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 17f262143f57a..95e275a2d0333 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -70,9 +70,14 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature ASYNC_QUERY = new NodeFeature("esql.async_query"); + /** + * Does ESQL support FROM OPTIONS? 
+ */ + public static final NodeFeature FROM_OPTIONS = new NodeFeature("esql.from_options"); + @Override public Set getFeatures() { - return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y); + return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y, FROM_OPTIONS); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java index b573de7cc3435..ad9902a91d002 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlIndexResolver.java @@ -11,13 +11,13 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.IndexFieldCapabilities; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; -import org.elasticsearch.xpack.ql.index.IndexResolver; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypeRegistry; import org.elasticsearch.xpack.ql.type.DateEsField; @@ -55,9 +55,14 @@ public EsqlIndexResolver(Client client, DataTypeRegistry typeRegistry) { /** * Resolves a pattern to one (potentially compound meaning that spawns multiple indices) mapping. 
*/ - public void resolveAsMergedMapping(String indexWildcard, Set fieldNames, ActionListener listener) { + public void resolveAsMergedMapping( + String indexWildcard, + Set fieldNames, + IndicesOptions indicesOptions, + ActionListener listener + ) { client.fieldCaps( - createFieldCapsRequest(indexWildcard, fieldNames), + createFieldCapsRequest(indexWildcard, fieldNames, indicesOptions), listener.delegateFailureAndWrap((l, response) -> l.onResponse(mergedMappings(indexWildcard, response))) ); } @@ -239,13 +244,13 @@ private EsField conflictingMetricTypes(String name, String fullName, FieldCapabi return new InvalidMappedField(name, "mapped as different metric types in indices: " + indices); } - private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set fieldNames) { + private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set fieldNames, IndicesOptions indicesOptions) { FieldCapabilitiesRequest req = new FieldCapabilitiesRequest().indices(Strings.commaDelimitedListToStringArray(index)); req.fields(fieldNames.toArray(String[]::new)); req.includeUnmapped(true); // lenient because we throw our own errors looking at the response e.g. 
if something was not resolved // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable - req.indicesOptions(IndexResolver.FIELD_CAPS_INDICES_OPTIONS); + req.indicesOptions(indicesOptions); req.setMergeResults(false); return req; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 683460243ecbd..bbf16fc946999 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.core.Assertions; @@ -207,11 +208,13 @@ private void preAnalyzeIndices(LogicalPlan parsed, ActionListener void preAnalyzeIndices(LogicalPlan parsed, ActionListener fieldNames, + IndicesOptions indicesOptions, ActionListener listener ) { indexResolver.resolveAsMergedMapping(indexWildcard, fieldNames, false, Map.of(), new ActionListener<>() { @Override public void onResponse(IndexResolution fromQl) { - esqlIndexResolver.resolveAsMergedMapping(indexWildcard, fieldNames, new ActionListener<>() { + esqlIndexResolver.resolveAsMergedMapping(indexWildcard, fieldNames, indicesOptions, new ActionListener<>() { @Override public void onResponse(IndexResolution fromEsql) { if (fromQl.isValid() == false) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 6f3991a0e8323..7af93adc301d2 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -86,6 +86,7 @@ import org.elasticsearch.xpack.ql.expression.predicate.operator.arithmetic.ArithmeticOperation; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; @@ -466,7 +467,7 @@ public void testDissectParserSimple() throws IOException { } public void testEsRelation() throws IOException { - var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomBoolean()); + var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomEsSourceOptions(), randomBoolean()); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); PlanNamedTypes.writeEsRelation(out, orig); @@ -477,7 +478,7 @@ public void testEsRelation() throws IOException { public void testEsqlProject() throws IOException { var orig = new EsqlProject( Source.EMPTY, - new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomBoolean()), + new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomEsSourceOptions(), randomBoolean()), List.of(randomFieldAttribute()) ); BytesStreamOutput bso = new BytesStreamOutput(); @@ -488,7 +489,13 @@ public void testEsqlProject() throws IOException { } public void testMvExpand() throws IOException { - var esRelation = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomBoolean()); + var esRelation = new EsRelation( + Source.EMPTY, + 
randomEsIndex(), + List.of(randomFieldAttribute()), + randomEsSourceOptions(), + randomBoolean() + ); var orig = new MvExpand(Source.EMPTY, esRelation, randomFieldAttribute(), randomFieldAttribute()); BytesStreamOutput bso = new BytesStreamOutput(); PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); @@ -685,6 +692,31 @@ static Map randomProperties(int depth) { return Map.copyOf(map); } + static EsSourceOptions randomEsSourceOptions() { + EsSourceOptions eso = new EsSourceOptions(); + if (randomBoolean()) { + eso.addOption("allow_no_indices", String.valueOf(randomBoolean())); + } + if (randomBoolean()) { + eso.addOption("ignore_unavailable", String.valueOf(randomBoolean())); + } + if (randomBoolean()) { + String idsList = String.join(",", randomList(1, 5, PlanNamedTypesTests::randomName)); + eso.addOption( + "preference", + randomFrom( + "_only_local", + "_local", + "_only_nodes:" + idsList, + "_prefer_nodes:" + idsList, + "_shards:" + idsList, + randomName() + ) + ); + } + return eso; + } + static List DATA_TYPES = EsqlDataTypes.types().stream().toList(); static DataType randomDataType() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 9e215e45fbde2..304f693adf89c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.parser; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; @@ -587,7 +589,7 @@ public void testMetadataFieldOnOtherSources() { expectError("show info metadata 
_index", "line 1:11: token recognition error at: 'm'"); expectError( "explain [from foo] metadata _index", - "line 1:20: mismatched input 'metadata' expecting {'|', ',', OPENING_BRACKET, ']', 'metadata'}" + "line 1:20: mismatched input 'metadata' expecting {'|', ',', OPENING_BRACKET, ']', 'options', 'metadata'}" ); } @@ -611,6 +613,106 @@ public void testMetadataFieldNotFoundNormalField() { expectError("from test metadata emp_no", "line 1:21: unsupported metadata field [emp_no]"); } + public void testFromOptionsUnknownName() { + expectError(FROM + " options \"foo\"=\"oof\",\"bar\"=\"rab\"", "line 1:20: invalid options provided: unknown option named [foo]"); + } + + public void testFromOptionsPartialInvalid() { + expectError( + FROM + " options \"allow_no_indices\"=\"true\",\"bar\"=\"rab\"", + "line 1:46: invalid options provided: unknown option named [bar]" + ); + } + + public void testFromOptionsInvalidIndicesOptionValue() { + expectError( + FROM + " options \"allow_no_indices\"=\"foo\"", + "line 1:20: invalid options provided: Could not convert [allow_no_indices] to boolean" + ); + } + + public void testFromOptionsEmptyIndicesOptionName() { + expectError(FROM + " options \"\"=\"true\"", "line 1:20: invalid options provided: unknown option named []"); + } + + public void testFromOptionsEmptyIndicesOptionValue() { + expectError( + FROM + " options \"allow_no_indices\"=\"\"", + "line 1:20: invalid options provided: Could not convert [allow_no_indices] to boolean. " + + "Failed to parse value [] as only [true] or [false] are allowed." + ); + expectError( + FROM + " options \"ignore_unavailable\"=\"TRUE\"", + "line 1:20: invalid options provided: Could not convert [ignore_unavailable] to boolean. " + + "Failed to parse value [TRUE] as only [true] or [false] are allowed." 
+ ); + expectError(FROM + " options \"preference\"=\"\"", "line 1:20: invalid options provided: no Preference for []"); + } + + public void testFromOptionsSuggestedOptionName() { + expectError( + FROM + " options \"allow_indices\"=\"true\"", + "line 1:20: invalid options provided: unknown option named [allow_indices], did you mean [allow_no_indices]?" + ); + } + + public void testFromOptionsInvalidPreferValue() { + expectError(FROM + " options \"preference\"=\"_foo\"", "line 1:20: invalid options provided: no Preference for [_foo]"); + } + + public void testFromOptionsUnquotedName() { + expectError(FROM + " options allow_no_indices=\"oof\"", "line 1:19: mismatched input 'allow_no_indices' expecting QUOTED_STRING"); + } + + public void testFromOptionsUnquotedValue() { + expectError(FROM + " options \"allow_no_indices\"=oof", "line 1:38: mismatched input 'oof' expecting QUOTED_STRING"); + } + + public void testFromOptionsDuplicates() { + for (var name : List.of("allow_no_indices", "ignore_unavailable", "preference")) { + String options = '"' + name + "\"=\"false\""; + options += ',' + options; + expectError(FROM + " options " + options, "invalid options provided: option [" + name + "] has already been provided"); + } + } + + public void testFromOptionsValues() { + boolean allowNoIndices = randomBoolean(); + boolean ignoreUnavailable = randomBoolean(); + String idsList = String.join(",", randomList(1, 5, () -> randomAlphaOfLengthBetween(1, 25))); + String preference = randomFrom( + "_only_local", + "_local", + "_only_nodes:" + idsList, + "_prefer_nodes:" + idsList, + "_shards:" + idsList, + randomAlphaOfLengthBetween(1, 25) + ); + List options = new ArrayList<>(3); + options.add("\"allow_no_indices\"=\"" + allowNoIndices + "\""); + options.add("\"ignore_unavailable\"=\"" + ignoreUnavailable + "\""); + options.add("\"preference\"=\"" + preference + "\""); + Randomness.shuffle(options); + String optionsList = String.join(",", options); + + var plan = statement(FROM + " 
OPTIONS " + optionsList); + var unresolved = as(plan, EsqlUnresolvedRelation.class); + assertNotNull(unresolved.esSourceOptions()); + var indicesOptions = unresolved.esSourceOptions().indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS); + assertThat(indicesOptions.allowNoIndices(), is(allowNoIndices)); + assertThat(indicesOptions.ignoreUnavailable(), is(ignoreUnavailable)); + assertThat(unresolved.esSourceOptions().preference(), is(preference)); + } + + public void testFromOptionsWithMetadata() { + var plan = statement(FROM + " OPTIONS \"preference\"=\"foo\" METADATA _id"); + var unresolved = as(plan, EsqlUnresolvedRelation.class); + assertNotNull(unresolved.esSourceOptions()); + assertThat(unresolved.esSourceOptions().preference(), is("foo")); + assertFalse(unresolved.metadataFields().isEmpty()); + assertThat(unresolved.metadataFields().get(0).qualifiedName(), is("_id")); + } + public void testDissectPattern() { LogicalPlan cmd = processingCommand("dissect a \"%{foo}\""); assertEquals(Dissect.class, cmd.getClass()); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/options/EsSourceOptions.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/options/EsSourceOptions.java new file mode 100644 index 0000000000000..25b40b4b447fd --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/options/EsSourceOptions.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ql.options; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.ql.util.StringUtils; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.action.support.IndicesOptions.ConcreteTargetOptions.IGNORE_UNAVAILABLE; +import static org.elasticsearch.action.support.IndicesOptions.WildcardOptions.ALLOW_NO_INDICES; + +/* + * This provides a repository for index resolution and/or search-time configuration options. + * Such as: [search] preference and [search / index resolution] allow_no_indices, ignore_unavailable. + * + * Some of the options end up in a IndicesOptions instance. However, FieldCaps and Search APIs use IndicesOptions + * defaults having conflicting values. So this class will just validate and record the user-provided settings first, and then apply these + * onto a base (an API-specific default). + */ +public class EsSourceOptions { + + private static final String OPTION_PREFERENCE = "preference"; + public static final EsSourceOptions NO_OPTIONS = new EsSourceOptions(); + + @Nullable + private String allowNoIndices; + @Nullable + private String ignoreUnavailable; + @Nullable + private String preference; + + public EsSourceOptions() {} + + public EsSourceOptions(StreamInput in) throws IOException { + this.allowNoIndices = in.readOptionalString(); + this.ignoreUnavailable = in.readOptionalString(); + this.preference = in.readOptionalString(); + } + + public IndicesOptions indicesOptions(IndicesOptions base) { + if (allowNoIndices == null && ignoreUnavailable == null) { + return base; + } + var wildcardOptions = allowNoIndices != null + ? 
IndicesOptions.WildcardOptions.parseParameters(null, allowNoIndices, base.wildcardOptions()) + : base.wildcardOptions(); + var targetOptions = ignoreUnavailable != null + ? IndicesOptions.ConcreteTargetOptions.fromParameter(ignoreUnavailable, base.concreteTargetOptions()) + : base.concreteTargetOptions(); + return new IndicesOptions(targetOptions, wildcardOptions, base.gatekeeperOptions(), base.failureStoreOptions()); + } + + @Nullable + public String preference() { + return preference; + } + + public void addOption(String name, String value) { + switch (name) { + case ALLOW_NO_INDICES -> { + requireUnset(name, allowNoIndices); + IndicesOptions.WildcardOptions.parseParameters(null, value, null); + allowNoIndices = value; + } + case IGNORE_UNAVAILABLE -> { + requireUnset(name, ignoreUnavailable); + IndicesOptions.ConcreteTargetOptions.fromParameter(value, null); + ignoreUnavailable = value; + } + case OPTION_PREFERENCE -> { + requireUnset(name, preference); + // The validation applies only for the predefined settings (i.e. prefixed by '_') or empty one (i.e. delegate handling + // of this case). + if (value.isEmpty() || value.charAt(0) == '_') { + // Note: _search will neither fail, nor warn about something like `preference=_shards:0,1|_doesnotexist` + Preference.parse(value); + } + preference = value; + } + default -> { + String message = "unknown option named [" + name + "]"; + List matches = StringUtils.findSimilar(name, List.of(ALLOW_NO_INDICES, IGNORE_UNAVAILABLE, OPTION_PREFERENCE)); + if (matches.isEmpty() == false) { + String suggestions = matches.size() == 1 ? 
"[" + matches.get(0) + "]" : "any of " + matches; + message += ", did you mean " + suggestions + "?"; + } + throw new IllegalArgumentException(message); + } + } + } + + private static void requireUnset(String name, String value) { + if (value != null) { + throw new IllegalArgumentException("option [" + name + "] has already been provided"); + } + } + + public void writeEsSourceOptions(StreamOutput out) throws IOException { + out.writeOptionalString(allowNoIndices); + out.writeOptionalString(ignoreUnavailable); + out.writeOptionalString(preference); + } + + @Override + public int hashCode() { + return Objects.hash(allowNoIndices, ignoreUnavailable, preference); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + EsSourceOptions other = (EsSourceOptions) obj; + return Objects.equals(allowNoIndices, other.allowNoIndices) + && Objects.equals(ignoreUnavailable, other.ignoreUnavailable) + && Objects.equals(preference, other.preference); + } +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java index 4a31309ac8f2f..94e0177972306 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java @@ -9,6 +9,7 @@ import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.index.EsIndex; +import org.elasticsearch.xpack.ql.options.EsSourceOptions; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.NodeUtils; import org.elasticsearch.xpack.ql.tree.Source; @@ -24,26 +25,33 @@ public class EsRelation extends LeafPlan { private final EsIndex index; private final List attrs; + 
private final EsSourceOptions esSourceOptions; private final boolean frozen; public EsRelation(Source source, EsIndex index, boolean frozen) { - this(source, index, flatten(source, index.mapping()), frozen); + this(source, index, flatten(source, index.mapping()), EsSourceOptions.NO_OPTIONS, frozen); } public EsRelation(Source source, EsIndex index, List attributes) { - this(source, index, attributes, false); + this(source, index, attributes, EsSourceOptions.NO_OPTIONS, false); } - public EsRelation(Source source, EsIndex index, List attributes, boolean frozen) { + public EsRelation(Source source, EsIndex index, List attributes, EsSourceOptions esSourceOptions) { + this(source, index, attributes, esSourceOptions, false); + } + + public EsRelation(Source source, EsIndex index, List attributes, EsSourceOptions esSourceOptions, boolean frozen) { super(source); this.index = index; this.attrs = attributes; + Objects.requireNonNull(esSourceOptions); + this.esSourceOptions = esSourceOptions; this.frozen = frozen; } @Override protected NodeInfo info() { - return NodeInfo.create(this, EsRelation::new, index, attrs, frozen); + return NodeInfo.create(this, EsRelation::new, index, attrs, esSourceOptions, frozen); } private static List flatten(Source source, Map mapping) { @@ -73,6 +81,10 @@ public EsIndex index() { return index; } + public EsSourceOptions esSourceOptions() { + return esSourceOptions; + } + public boolean frozen() { return frozen; } @@ -89,7 +101,7 @@ public boolean expressionsResolved() { @Override public int hashCode() { - return Objects.hash(index, frozen); + return Objects.hash(index, esSourceOptions, frozen); } @Override @@ -103,7 +115,7 @@ public boolean equals(Object obj) { } EsRelation other = (EsRelation) obj; - return Objects.equals(index, other.index) && frozen == other.frozen; + return Objects.equals(index, other.index) && Objects.equals(esSourceOptions, other.esSourceOptions) && frozen == other.frozen; } @Override From 
54ae1e5e7b2547045dd93ac6649f1caee784edc5 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Thu, 28 Mar 2024 18:18:14 +0100 Subject: [PATCH 13/69] Use more robust assertion in SimpleThreadPoolIT.testThreadPoolMetrics (#106624) Assert using greaterThanOrEqualTo to allow for additional scheduled background threads to appear in collected measurements after the thread pool stats have already been pulled, e.g. this could be the case for the cluster coordination thread pool. --- .../threadpool/SimpleThreadPoolIT.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index fa6126c13c741..44b6ef1d51ce0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -39,6 +39,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.matchesRegex; @@ -117,7 +118,6 @@ public void testThreadNames() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104652") public void testThreadPoolMetrics() throws Exception { internalCluster().startNode(); @@ -148,15 +148,17 @@ public void testThreadPoolMetrics() throws Exception { assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("str_value", "s" + i))); assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); } + final var tp = internalCluster().getInstance(ThreadPool.class, dataNodeName); + final var tps = new 
ThreadPoolStats[1]; // wait for all threads to complete so that we get deterministic results - waitUntil(() -> tp.stats().stats().stream().allMatch(s -> s.active() == 0)); - ThreadPoolStats tps = tp.stats(); + waitUntil(() -> (tps[0] = tp.stats()).stats().stream().allMatch(s -> s.active() == 0)); + plugin.collect(); ArrayList registeredMetrics = plugin.getRegisteredMetrics(InstrumentType.LONG_GAUGE); registeredMetrics.addAll(plugin.getRegisteredMetrics(InstrumentType.LONG_ASYNC_COUNTER)); - tps.forEach(stats -> { + tps[0].forEach(stats -> { Map threadPoolStats = List.of( Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_COMPLETED, stats.completed()), Map.entry(ThreadPool.THREAD_POOL_METRIC_NAME_ACTIVE, (long) stats.active()), @@ -182,7 +184,9 @@ public void testThreadPoolMetrics() throws Exception { logger.info("Stats of `{}`: {}", stats.name(), threadPoolStats); logger.info("Measurements of `{}`: {}", stats.name(), measurements); - threadPoolStats.forEach((metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(equalTo(value))))); + threadPoolStats.forEach( + (metric, value) -> assertThat(measurements, hasEntry(equalTo(metric), contains(greaterThanOrEqualTo(value)))) + ); }); } From a7bc24b6528b8cb0f391f79682def2976da6e32f Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 28 Mar 2024 20:24:36 +0200 Subject: [PATCH 14/69] Update 8.13 release notes with known issue on downsampling (#106881) * Update 8.13 release notes with known issue * revert unintended * reword * reword * reword --- docs/reference/release-notes/8.13.0.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/release-notes/8.13.0.asciidoc b/docs/reference/release-notes/8.13.0.asciidoc index 2ef183374f167..47855773d0543 100644 --- a/docs/reference/release-notes/8.13.0.asciidoc +++ b/docs/reference/release-notes/8.13.0.asciidoc @@ -3,6 +3,16 @@ Also see <>. 
+[[known-issues-8.13.0]] +[float] +=== Known issues + +* Nodes upgraded to 8.13.0 fail to load downsampling persistent tasks. This prevents them from joining the cluster, blocking its upgrade (issue: {es-issue}106880[#106880]) ++ +This affects clusters running version 8.10 or later, with an active downsampling +https://www.elastic.co/guide/en/elasticsearch/reference/current/downsampling-ilm.html[configuration] +or a configuration that was activated at some point since upgrading to version 8.10 or later. + [[breaking-8.13.0]] [float] === Breaking changes From 4b23c137895b7995dc1b6801148ff113408c6b73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Thu, 28 Mar 2024 20:51:56 +0100 Subject: [PATCH 15/69] Add PersistentTask exceptions (#106883) Add a couple of exceptions that can be used in TransportActions to check/raise error if specific conditions on PersistentTasks (and the node they are allocated to) are not met. --- .../elasticsearch/ElasticsearchException.java | 14 ++++++++ .../org/elasticsearch/TransportVersions.java | 1 + .../NotPersistentTaskNodeException.java | 34 +++++++++++++++++++ ...ersistentTaskNodeNotAssignedException.java | 34 +++++++++++++++++++ .../ExceptionSerializationTests.java | 4 +++ 5 files changed, 87 insertions(+) create mode 100644 server/src/main/java/org/elasticsearch/persistent/NotPersistentTaskNodeException.java create mode 100644 server/src/main/java/org/elasticsearch/persistent/PersistentTaskNodeNotAssignedException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 83e5375546b63..626f0a491e44d 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -32,6 +32,8 @@ import org.elasticsearch.indices.FailureIndexNotSupportedException; import org.elasticsearch.indices.recovery.RecoveryCommitTooNewException; 
import org.elasticsearch.ingest.GraphStructureException; +import org.elasticsearch.persistent.NotPersistentTaskNodeException; +import org.elasticsearch.persistent.PersistentTaskNodeNotAssignedException; import org.elasticsearch.rest.ApiNotAvailableException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchException; @@ -1917,6 +1919,18 @@ private enum ElasticsearchExceptionHandle { FailureIndexNotSupportedException::new, 178, TransportVersions.ADD_FAILURE_STORE_INDICES_OPTIONS + ), + NOT_PERSISTENT_TASK_NODE_EXCEPTION( + NotPersistentTaskNodeException.class, + NotPersistentTaskNodeException::new, + 179, + TransportVersions.ADD_PERSISTENT_TASK_EXCEPTIONS + ), + PERSISTENT_TASK_NODE_NOT_ASSIGNED_EXCEPTION( + PersistentTaskNodeNotAssignedException.class, + PersistentTaskNodeNotAssignedException::new, + 180, + TransportVersions.ADD_PERSISTENT_TASK_EXCEPTIONS ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 698c879c3f252..1be7bd795d1c1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -156,6 +156,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_L2_NORM_SIMILARITY_ADDED = def(8_616_00_0); public static final TransportVersion SEARCH_NODE_LOAD_AUTOSCALING = def(8_617_00_0); public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); + public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/persistent/NotPersistentTaskNodeException.java b/server/src/main/java/org/elasticsearch/persistent/NotPersistentTaskNodeException.java new file mode 100644 index 0000000000000..d51d7518a47da --- /dev/null +++ b/server/src/main/java/org/elasticsearch/persistent/NotPersistentTaskNodeException.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.persistent; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * Exception which indicates that an operation failed because the node stopped being the node on which the PersistentTask is allocated. + */ +public class NotPersistentTaskNodeException extends ElasticsearchException { + + public NotPersistentTaskNodeException(String nodeId, String persistentTaskName) { + super("Node [{}] is not hosting PersistentTask [{}]", nodeId, persistentTaskName); + } + + public NotPersistentTaskNodeException(StreamInput in) throws IOException { + super(in); + } + + @Override + public RestStatus status() { + return RestStatus.SERVICE_UNAVAILABLE; + } +} diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTaskNodeNotAssignedException.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskNodeNotAssignedException.java new file mode 100644 index 0000000000000..199c3d384eb7a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTaskNodeNotAssignedException.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.persistent; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; + +/** + * Exception which indicates that the PersistentTask node has not been assigned yet. + */ +public class PersistentTaskNodeNotAssignedException extends ElasticsearchException { + + public PersistentTaskNodeNotAssignedException(String persistentTaskName) { + super("PersistentTask [{}] has not been yet assigned to a node on this cluster", persistentTaskName); + } + + public PersistentTaskNodeNotAssignedException(StreamInput in) throws IOException { + super(in); + } + + @Override + public RestStatus status() { + return RestStatus.SERVICE_UNAVAILABLE; + } +} diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index b8091b50b5dd8..764f12a386c02 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -68,6 +68,8 @@ import org.elasticsearch.indices.recovery.RecoveryCommitTooNewException; import org.elasticsearch.ingest.GraphStructureException; import org.elasticsearch.ingest.IngestProcessorException; +import org.elasticsearch.persistent.NotPersistentTaskNodeException; +import org.elasticsearch.persistent.PersistentTaskNodeNotAssignedException; import org.elasticsearch.repositories.RepositoryConflictException; import org.elasticsearch.repositories.RepositoryException; import 
org.elasticsearch.rest.ApiNotAvailableException; @@ -829,6 +831,8 @@ public void testIds() { ids.put(176, SearchTimeoutException.class); ids.put(177, GraphStructureException.class); ids.put(178, FailureIndexNotSupportedException.class); + ids.put(179, NotPersistentTaskNodeException.class); + ids.put(180, PersistentTaskNodeNotAssignedException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { From c9b5aa09244001451a74f1b1528a9aee0c3844c4 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 28 Mar 2024 17:43:59 -0400 Subject: [PATCH 16/69] Actually use TypedDataSupplier as intended (#106892) --- .../expression/function/TestCaseSupplier.java | 42 +++++++++---------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 2bae1546cd02f..d600e51c07925 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -234,24 +234,13 @@ private static TestCaseSupplier testCaseSupplier( ) { String caseName = lhsSupplier.name() + ", " + rhsSupplier.name(); return new TestCaseSupplier(caseName, List.of(lhsSupplier.type(), rhsSupplier.type()), () -> { - Object lhs = lhsSupplier.supplier().get(); - Object rhs = rhsSupplier.supplier().get(); - TypedData lhsTyped = new TypedData( - // TODO there has to be a better way to handle unsigned long - lhs instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : lhs, - lhsSupplier.type(), - "lhs" - ); - TypedData rhsTyped = new TypedData( - rhs instanceof BigInteger b ? 
NumericUtils.asLongUnsigned(b) : rhs, - rhsSupplier.type(), - "rhs" - ); + TypedData lhsTyped = lhsSupplier.get(); + TypedData rhsTyped = rhsSupplier.get(); TestCase testCase = new TestCase( List.of(lhsTyped, rhsTyped), evaluatorToString.apply(lhsSupplier.type(), rhsSupplier.type()), expectedType, - equalTo(expectedValue.apply(lhs, rhs)) + equalTo(expectedValue.apply(lhsTyped.getValue(), rhsTyped.getValue())) ); for (String warning : warnings) { testCase = testCase.withWarning(warning); @@ -710,13 +699,8 @@ public static void unary( ) { for (TypedDataSupplier supplier : valueSuppliers) { suppliers.add(new TestCaseSupplier(supplier.name(), List.of(supplier.type()), () -> { - Object value = supplier.supplier().get(); - TypedData typed = new TypedData( - // TODO there has to be a better way to handle unsigned long - value instanceof BigInteger b ? NumericUtils.asLongUnsigned(b) : value, - supplier.type(), - "value" - ); + TypedData typed = supplier.get(); + Object value = typed.getValue(); logger.info("Value is " + value + " of type " + value.getClass()); logger.info("expectedValue is " + expectedValue.apply(value)); TestCase testCase = new TestCase( @@ -1304,7 +1288,11 @@ public static class TypedData { * @param forceLiteral should this data always be converted to a literal and never to a field reference? 
*/ private TypedData(Object data, DataType type, String name, boolean forceLiteral) { - this.data = data; + if (type == DataTypes.UNSIGNED_LONG && data instanceof BigInteger b) { + this.data = NumericUtils.asLongUnsigned(b); + } else { + this.data = data; + } this.type = type; this.name = name; this.forceLiteral = forceLiteral; @@ -1379,6 +1367,16 @@ public Object data() { return data; } + /** + * @return the data value being supplied, casting unsigned longs into BigIntegers correctly + */ + public Object getValue() { + if (type == DataTypes.UNSIGNED_LONG && data instanceof Long l) { + return NumericUtils.unsignedLongAsBigInteger(l); + } + return data; + } + /** * Type of the value. For building {@link Expression}s. */ From f7fedb4d0aec5dc60bf52bb4c460584d08a236ce Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Fri, 29 Mar 2024 04:01:45 +0100 Subject: [PATCH 17/69] Fix downsample persistent task params serialization bwc (#106878) Missing a check on the transport version results in unreadable cluster state if it includes a serialized instance of DownsampleShardTaskParams. #98023 introduced an optional string array including dimensions used by time serie indices. Reading an optional array requires reading a boolean first which is required to know if an array of values exists in serialized form. From 8.13 on we try to read such a boolean which is not there because older versions don't write any boolean nor any string array. Here we include the check on versions for backward compatibility skipping reading any boolean or array whatsoever whenever not possible. Customers using downsampling might have cluster states including such serielized objects and would be unable to upgrade to version 8.13. They will be able to upgrade to any version including this fix. 
This fix has a side effect #106880 --- docs/changelog/106878.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../downsample/qa/mixed-cluster/build.gradle | 49 ++++++ .../MixedClusterDownsampleRestIT.java | 48 ++++++ .../test/downsample/10_basic.yml | 152 ++++++++++++++++++ .../downsample/DownsampleShardIndexer.java | 9 ++ .../downsample/DownsampleShardTaskParams.java | 19 ++- .../DownsampleShardTaskParamsTests.java | 134 +++++++++++++++ 8 files changed, 413 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/106878.yaml create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/build.gradle create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java create mode 100644 x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml create mode 100644 x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java diff --git a/docs/changelog/106878.yaml b/docs/changelog/106878.yaml new file mode 100644 index 0000000000000..585475bb5ea55 --- /dev/null +++ b/docs/changelog/106878.yaml @@ -0,0 +1,5 @@ +pr: 106878 +summary: Gate reading of optional string array for bwc +area: Downsampling +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1be7bd795d1c1..76f4d6c1c0fae 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -133,6 +133,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEX_REQUEST_NORMALIZED_BYTES_PARSED = def(8_593_00_0); public static final TransportVersion INGEST_GRAPH_STRUCTURE_EXCEPTION = def(8_594_00_0); public static final TransportVersion ML_MODEL_IN_SERVICE_SETTINGS = def(8_595_00_0); + // 8.14.0+ public 
static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0); public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0); public static final TransportVersion DATA_STREAM_AUTO_SHARDING_EVENT = def(8_598_00_0); diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle new file mode 100644 index 0000000000000..2449991a8e1e0 --- /dev/null +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.internal.info.BuildParams +import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask + +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.internal-test-artifact' +apply plugin: 'elasticsearch.bwc-test' + + +dependencies { + testImplementation project(path: ':test:test-clusters') + yamlRestTestImplementation project(path: xpackModule('rollup')) +} + +restResources { + restApi { + include '_common', 'bulk', 'cluster', 'indices', 'search', 'ingest.put_pipeline', 'ingest.delete_pipeline' + } +} + +def supportedVersion = bwcVersion -> { + return bwcVersion.onOrAfter("8.8.0"); +} + +BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> + + def yamlRestTest = tasks.register("v${bwcVersion}#yamlRestTest", StandaloneRestIntegTestTask) { + usesDefaultDistribution() + usesBwcDistribution(bwcVersion) + systemProperty("tests.old_cluster_version", bwcVersion) + testClassesDirs = sourceSets.yamlRestTest.output.classesDirs + classpath = sourceSets.yamlRestTest.runtimeClasspath + } + + tasks.register(bwcTaskName(bwcVersion)) { + dependsOn yamlRestTest + } +} + 
+tasks.named("yamlRestTest") { + enabled = false +} diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java new file mode 100644 index 0000000000000..a4765271e7300 --- /dev/null +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/downsample/MixedClusterDownsampleRestIT.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.Version; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; + +public class MixedClusterDownsampleRestIT extends ESClientYamlSuiteTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .withNode(node -> node.version(getOldVersion())) + .withNode(node -> node.version(Version.CURRENT)) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + static Version getOldVersion() { + return Version.fromString(System.getProperty("tests.old_cluster_version")); + } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public MixedClusterDownsampleRestIT(final 
ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + +} diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml new file mode 100644 index 0000000000000..265f97e73c234 --- /dev/null +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -0,0 +1,152 @@ +setup: + - skip: + version: " - 8.4.99" + reason: "rollup renamed to downsample in 8.5.0" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + index: + mode: time_series + routing_path: [metricset, k8s.pod.uid] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + created_at: + type: date_nanos + running: + type: boolean + number_of_containers: + type: integer + ip: + type: ip + tags: + type: keyword + values: + type: integer + multi-counter: + type: long + time_series_metric: counter + scaled-counter: + type: scaled_float + scaling_factor: 100 + time_series_metric: counter + multi-gauge: + type: integer + time_series_metric: gauge + scaled-gauge: + type: scaled_float + scaling_factor: 100 + time_series_metric: gauge + network: + properties: + tx: + type: long + time_series_metric: gauge + rx: + type: long + time_series_metric: gauge + - do: + bulk: + refresh: true + index: test + body: + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", 
"uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "scaled-counter": 10.0, "multi-gauge": [100, 200, 150], "scaled-gauge": 100.0, "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "scaled-counter": 20.0, "multi-gauge": [90, 91, 95], "scaled-gauge": 90.0, "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "scaled-counter": 1.0, "multi-gauge": [103, 110, 109], "scaled-gauge": 104.0, "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "scaled-counter": 100.0, "multi-gauge": [100, 100, 100], "scaled-gauge": 102.0, "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", 
"uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "scaled-counter": 7.0, "multi-gauge": [100, 100, 102], "scaled-gauge": 100.0, "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "scaled-counter": 0.0, "multi-gauge": [101, 102, 102], "scaled-gauge": 101.0, "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "scaled-counter": 1000.0, "multi-gauge": [99, 100, 110], "scaled-gauge": 99.0, "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"index": {}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "scaled-counter": 70.0, "multi-gauge": [95, 98, 100], "scaled-gauge": 95.0, "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + + - do: + indices.put_settings: + index: test + body: + index.blocks.write: true + +--- +"Downsample index": + + - do: + indices.downsample: + 
index: test + target_index: test-downsample + body: > + { + "fixed_interval": "1h" + } + - is_true: acknowledged + + - do: + search: + index: test-downsample + body: + sort: [ "@timestamp" ] + + - length: { hits.hits: 4 } + - match: { hits.hits.0._source._doc_count: 2 } + - match: { hits.hits.0._source.metricset: pod } + + # Assert rollup index settings + - do: + indices.get_settings: + index: test-downsample + + - match: { test-downsample.settings.index.mode: time_series } + - match: { test-downsample.settings.index.time_series.end_time: 2021-04-29T00:00:00Z } + - match: { test-downsample.settings.index.time_series.start_time: 2021-04-28T00:00:00Z } + - match: { test-downsample.settings.index.routing_path: [ "metricset", "k8s.pod.uid"] } + - match: { test-downsample.settings.index.downsample.source.name: test } + + # Assert rollup index mapping + - do: + indices.get_mapping: + index: test-downsample + + - match: { test-downsample.mappings.properties.@timestamp.type: date } + - match: { test-downsample.mappings.properties.@timestamp.meta.fixed_interval: 1h } + - match: { test-downsample.mappings.properties.@timestamp.meta.time_zone: UTC } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } + 
- match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java index 59c1c9c38efae..72d4b934ecdda 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardIndexer.java @@ -564,6 +564,15 @@ public XContentBuilder buildDownsampleDocument() throws IOException { fieldProducer.write(builder); } + if (dimensions.length == 0) { + logger.debug("extracting dimensions from legacy tsid"); + Map dimensions = (Map) DocValueFormat.TIME_SERIES_ID.format(tsid); + for (Map.Entry e : dimensions.entrySet()) { + assert e.getValue() != null; + builder.field((String) e.getKey(), e.getValue()); + } + } + builder.endObject(); return builder; } diff --git 
a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java index 4ccc913b974d6..93be79e859f8d 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java @@ -10,6 +10,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.downsample.DownsampleConfig; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.shard.ShardId; @@ -36,6 +37,7 @@ public record DownsampleShardTaskParams( String[] dimensions ) implements PersistentTaskParams { + private static final TransportVersion V_8_13_0 = TransportVersions.ML_MODEL_IN_SERVICE_SETTINGS; public static final String NAME = DownsampleShardTask.TASK_NAME; private static final ParseField DOWNSAMPLE_CONFIG = new ParseField("downsample_config"); private static final ParseField DOWNSAMPLE_INDEX = new ParseField("rollup_index"); @@ -71,7 +73,7 @@ public record DownsampleShardTaskParams( new ShardId(in), in.readStringArray(), in.readStringArray(), - in.readOptionalStringArray() + in.getTransportVersion().onOrAfter(V_8_13_0) ? 
in.readOptionalStringArray() : new String[] {} ); } @@ -85,7 +87,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(SHARD_ID.getPreferredName(), shardId); builder.array(METRICS.getPreferredName(), metrics); builder.array(LABELS.getPreferredName(), labels); - builder.array(DIMENSIONS.getPreferredName(), dimensions); + if (dimensions.length > 0) { + builder.array(DIMENSIONS.getPreferredName(), dimensions); + } return builder.endObject(); } @@ -108,7 +112,9 @@ public void writeTo(StreamOutput out) throws IOException { shardId.writeTo(out); out.writeStringArray(metrics); out.writeStringArray(labels); - out.writeOptionalStringArray(dimensions); + if (out.getTransportVersion().onOrAfter(V_8_13_0)) { + out.writeOptionalStringArray(dimensions); + } } public static DownsampleShardTaskParams fromXContent(XContentParser parser) throws IOException { @@ -157,7 +163,7 @@ public static class Builder { ShardId shardId; String[] metrics; String[] labels; - String[] dimensions; + String[] dimensions = Strings.EMPTY_ARRAY; public Builder downsampleConfig(final DownsampleConfig downsampleConfig) { this.downsampleConfig = downsampleConfig; @@ -212,4 +218,9 @@ public DownsampleShardTaskParams build() { ); } } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java new file mode 100644 index 0000000000000..7d79ae720271e --- /dev/null +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParamsTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.downsample; + +import org.elasticsearch.action.downsample.DownsampleConfig; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class DownsampleShardTaskParamsTests extends AbstractXContentSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return DownsampleShardTaskParams::new; + } + + @Override + protected DownsampleShardTaskParams createTestInstance() { + long startTime = randomLongBetween(100000, 200000); + long endTime = startTime + randomLongBetween(1000, 10_000); + String[] dimensions = randomBoolean() ? 
generateRandomStringArray(5, 5, false, true) : new String[] {}; + return new DownsampleShardTaskParams( + new DownsampleConfig(randomFrom(DateHistogramInterval.HOUR, DateHistogramInterval.DAY)), + randomAlphaOfLength(5), + startTime, + endTime, + new ShardId(new Index(randomAlphaOfLength(5), "n/a"), between(0, 5)), + generateRandomStringArray(5, 5, false, false), + generateRandomStringArray(5, 5, false, false), + dimensions + ); + } + + @Override + protected DownsampleShardTaskParams mutateInstance(DownsampleShardTaskParams in) throws IOException { + return switch (between(0, 7)) { + case 0 -> new DownsampleShardTaskParams( + new DownsampleConfig(randomFrom(DateHistogramInterval.WEEK, DateHistogramInterval.MONTH)), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 1 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + randomAlphaOfLength(6), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 2 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis() + between(1, 100), + in.indexEndTimeMillis() + between(1, 100), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 3 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis() + between(10, 100), + new ShardId(new Index(randomAlphaOfLength(6), "n/a"), between(0, 5)), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 4 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis() + between(10, 100), + in.shardId(), + in.metrics(), + in.labels(), + in.dimensions() + ); + case 5 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), 
+ in.indexEndTimeMillis(), + in.shardId(), + generateRandomStringArray(6, 6, false, false), + in.labels(), + in.dimensions() + ); + case 6 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + generateRandomStringArray(6, 6, false, false), + in.dimensions() + ); + case 7 -> new DownsampleShardTaskParams( + in.downsampleConfig(), + in.downsampleIndex(), + in.indexStartTimeMillis(), + in.indexEndTimeMillis(), + in.shardId(), + in.metrics(), + in.labels(), + generateRandomStringArray(6, 6, false, false) + ); + default -> throw new AssertionError("unknown option"); + }; + } + + @Override + protected DownsampleShardTaskParams doParseInstance(XContentParser parser) throws IOException { + return DownsampleShardTaskParams.fromXContent(parser); + } +} From b4938e16457dc69d392235eaf404a6dad9ddb717 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 29 Mar 2024 09:24:52 +0200 Subject: [PATCH 18/69] Query API Key Information API support for the `typed_keys` request parameter (#106873) The typed_keys request parameter is the canonical parameter, that's also used in the regular index _search enpoint, in order to return the types of aggregations in the response. This is required by typed language clients of the _security/_query/api_key endpoint that are using aggregations. 
Closes #106817 --- docs/changelog/106873.yaml | 6 + .../rest-api/security/query-api-key.asciidoc | 4 + docs/reference/search/search.asciidoc | 4 +- .../api/security.query_api_keys.json | 5 + .../xpack/security/ApiKeyAggsIT.java | 136 +++++++++++------- .../action/apikey/RestQueryApiKeyAction.java | 7 + 6 files changed, 105 insertions(+), 57 deletions(-) create mode 100644 docs/changelog/106873.yaml diff --git a/docs/changelog/106873.yaml b/docs/changelog/106873.yaml new file mode 100644 index 0000000000000..f823caff7aefe --- /dev/null +++ b/docs/changelog/106873.yaml @@ -0,0 +1,6 @@ +pr: 106873 +summary: Query API Key Information API support for the `typed_keys` request parameter +area: Security +type: enhancement +issues: + - 106817 diff --git a/docs/reference/rest-api/security/query-api-key.asciidoc b/docs/reference/rest-api/security/query-api-key.asciidoc index f2969aa43d57b..1888a110e072f 100644 --- a/docs/reference/rest-api/security/query-api-key.asciidoc +++ b/docs/reference/rest-api/security/query-api-key.asciidoc @@ -159,6 +159,10 @@ its <> and the owner user's If it exists, the profile uid is returned under the `profile_uid` response field for each API key. Defaults to `false`. +`typed_keys`:: +(Optional, Boolean) If `true`, aggregation names are prefixed by their respective types in the response. +Defaults to `false`. + [[security-api-query-api-key-request-body]] ==== {api-request-body-title} diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 074c950d3e987..f602b6457c31e 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -341,8 +341,8 @@ If `true`, the exact number of hits is returned at the cost of some performance. If `false`, the response does not include the total number of hits matching the query. `typed_keys`:: -(Optional, Boolean) If `true`, aggregation and suggester names are be prefixed -by their respective types in the response. Defaults to `true`. 
+(Optional, Boolean) If `true`, aggregation and suggester names are prefixed +by their respective types in the response. Defaults to `false`. `version`:: (Optional, Boolean) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json index 1127f79294910..de95f76ad49db 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_api_keys.json @@ -31,6 +31,11 @@ "type":"boolean", "default":false, "description": "flag to also retrieve the API Key's owner profile uid, if it exists" + }, + "typed_keys":{ + "type":"boolean", + "default":false, + "description": "flag to prefix aggregation names by their respective types in the response" } }, "body":{ diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java index f4fa304f9c1e2..427d918fd64d5 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/ApiKeyAggsIT.java @@ -65,7 +65,8 @@ public void testFiltersAggs() throws IOException { ), API_KEY_USER_AUTH_HEADER ); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "aggs": { "hostnames": { @@ -79,22 +80,23 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) ((Map) aggs.get("hostnames")).get("buckets")).size(), is(2)); + String aggName = typedAggs ? 
"filters#hostnames" : "hostnames"; + assertThat(((Map) ((Map) aggs.get(aggName)).get("buckets")).size(), is(2)); assertThat( - ((Map) ((Map) ((Map) aggs.get("hostnames")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "my-org-host-1" )).get("doc_count"), is(2) ); assertThat( - ((Map) ((Map) ((Map) aggs.get("hostnames")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "my-org-host-2" )).get("doc_count"), is(2) ); }); // other bucket - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "only_user_keys": { @@ -108,22 +110,23 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).size(), is(2)); + String aggName = typedAggs ? "filters#only_user_keys" : "only_user_keys"; + assertThat(((Map) ((Map) aggs.get(aggName)).get("buckets")).size(), is(2)); assertThat( - ((Map) ((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "only_key4_match" )).get("doc_count"), is(1) ); assertThat( - ((Map) ((Map) ((Map) aggs.get("only_user_keys")).get("buckets")).get( + ((Map) ((Map) ((Map) aggs.get(aggName)).get("buckets")).get( "other_user_keys" )).get("doc_count"), is(1) ); }); // anonymous filters - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "all_user_keys": { @@ -139,27 +142,28 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - assertThat(((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).size(), is(4)); + String aggName = typedAggs ? 
"filters#all_user_keys" : "all_user_keys"; + assertThat(((List>) ((Map) aggs.get(aggName)).get("buckets")).size(), is(4)); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(0).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(0).get("doc_count"), is(2) ); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(1).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(1).get("doc_count"), is(2) ); assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(2).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(2).get("doc_count"), is(2) ); // the "other" bucket assertThat( - ((List>) ((Map) aggs.get("all_user_keys")).get("buckets")).get(3).get("doc_count"), + ((List>) ((Map) aggs.get(aggName)).get("buckets")).get(3).get("doc_count"), is(0) ); }); // nested filters - assertAggs(API_KEY_USER_AUTH_HEADER, """ + assertAggs(API_KEY_USER_AUTH_HEADER, typedAggs, """ { "aggs": { "level1": { @@ -184,36 +188,44 @@ public void testFiltersAggs() throws IOException { } } """, aggs -> { - List> level1Buckets = (List>) ((Map) aggs.get("level1")).get("buckets"); + String level1AggName = typedAggs ? "filters#level1" : "level1"; + List> level1Buckets = (List>) ((Map) aggs.get(level1AggName)).get( + "buckets" + ); assertThat(level1Buckets.size(), is(2)); assertThat(level1Buckets.get(0).get("doc_count"), is(2)); assertThat(level1Buckets.get(0).get("key"), is("rest-filter")); + String level2AggName = typedAggs ? 
"filters#level2" : "level2"; assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(0).get("level2")).get("buckets")) - .get("invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(0).get(level2AggName)).get( + "buckets" + )).get("invalidated")).get("doc_count"), is(0) ); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(0).get("level2")).get("buckets")) - .get("not-invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(0).get(level2AggName)).get( + "buckets" + )).get("not-invalidated")).get("doc_count"), is(2) ); assertThat(level1Buckets.get(1).get("doc_count"), is(2)); assertThat(level1Buckets.get(1).get("key"), is("user-filter")); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(1).get("level2")).get("buckets")) - .get("invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(1).get(level2AggName)).get( + "buckets" + )).get("invalidated")).get("doc_count"), is(0) ); assertThat( - ((Map) ((Map) ((Map) level1Buckets.get(1).get("level2")).get("buckets")) - .get("not-invalidated")).get("doc_count"), + ((Map) ((Map) ((Map) level1Buckets.get(1).get(level2AggName)).get( + "buckets" + )).get("not-invalidated")).get("doc_count"), is(2) ); }); // filter on disallowed fields { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -240,7 +252,7 @@ public void testFiltersAggs() throws IOException { ); } { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? 
"?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -310,7 +322,8 @@ public void testAggsForType() throws IOException { updateApiKeys(systemWriteCreds, "ctx._source['type']='cross_cluster';", crossApiKeyIds); boolean isAdmin = randomBoolean(); - assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -324,9 +337,8 @@ public void testAggsForType() throws IOException { } } """, aggs -> { - List> buckets = (List>) ((Map) aggs.get("all_keys_by_type")).get( - "buckets" - ); + String aggName = typedAggs ? "composite#all_keys_by_type" : "all_keys_by_type"; + List> buckets = (List>) ((Map) aggs.get(aggName)).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(((Map) buckets.get(0).get("key")).get("type"), is("cross_cluster")); assertThat(((Map) buckets.get(1).get("key")).get("type"), is("other")); @@ -342,7 +354,7 @@ public void testAggsForType() throws IOException { } }); - assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, """ + assertAggs(isAdmin ? API_KEY_ADMIN_AUTH_HEADER : API_KEY_USER_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -371,23 +383,23 @@ public void testAggsForType() throws IOException { """, aggs -> { assertThat(aggs.size(), is(4)); // 3 types - assertThat(((Map) aggs.get("type_cardinality")).get("value"), is(3)); + assertThat(((Map) aggs.get((typedAggs ? "cardinality#" : "") + "type_cardinality")).get("value"), is(3)); if (isAdmin) { // 8 keys - assertThat(((Map) aggs.get("type_value_count")).get("value"), is(8)); + assertThat(((Map) aggs.get((typedAggs ? "value_count#" : "") + "type_value_count")).get("value"), is(8)); } else { // 4 keys - assertThat(((Map) aggs.get("type_value_count")).get("value"), is(4)); + assertThat(((Map) aggs.get((typedAggs ? 
"value_count#" : "") + "type_value_count")).get("value"), is(4)); } - assertThat(((Map) aggs.get("missing_type_count")).get("doc_count"), is(0)); - List> typeTermsBuckets = (List>) ((Map) aggs.get("type_terms")).get( - "buckets" - ); + assertThat(((Map) aggs.get((typedAggs ? "missing#" : "") + "missing_type_count")).get("doc_count"), is(0)); + List> typeTermsBuckets = (List>) ((Map) aggs.get( + (typedAggs ? "sterms#" : "") + "type_terms" + )).get("buckets"); assertThat(typeTermsBuckets.size(), is(3)); }); // runtime type field is disallowed { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (typedAggs ? "?typed_keys" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -432,7 +444,8 @@ public void testFilterAggs() throws IOException { invalidateApiKey(key2User1KeyId, false, API_KEY_ADMIN_AUTH_HEADER); invalidateApiKey(key1User3KeyId, false, API_KEY_ADMIN_AUTH_HEADER); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + final boolean typedAggs = randomBoolean(); + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "size": 0, "aggs": { @@ -451,10 +464,11 @@ public void testFilterAggs() throws IOException { } } """, aggs -> { - assertThat(((Map) aggs.get("not_invalidated")).get("doc_count"), is(4)); // 6 - 2 (invalidated) + // 6 - 2 (invalidated) + assertThat(((Map) aggs.get(typedAggs ? "filter#not_invalidated" : "not_invalidated")).get("doc_count"), is(4)); List> buckets = (List>) ((Map) ((Map) aggs.get( - "not_invalidated" - )).get("keys_by_username")).get("buckets"); + typedAggs ? "filter#not_invalidated" : "not_invalidated" + )).get(typedAggs ? 
"composite#keys_by_username" : "keys_by_username")).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(((Map) buckets.get(0).get("key")).get("usernames"), is("test-user-1")); assertThat(buckets.get(0).get("doc_count"), is(1)); @@ -464,7 +478,7 @@ public void testFilterAggs() throws IOException { assertThat(buckets.get(2).get("doc_count"), is(1)); }); - assertAggs(API_KEY_ADMIN_AUTH_HEADER, """ + assertAggs(API_KEY_ADMIN_AUTH_HEADER, typedAggs, """ { "aggs": { "keys_by_username": { @@ -488,23 +502,32 @@ public void testFilterAggs() throws IOException { } } """, aggs -> { - List> buckets = (List>) ((Map) aggs.get("keys_by_username")).get( - "buckets" - ); + List> buckets = (List>) ((Map) aggs.get( + typedAggs ? "composite#keys_by_username" : "keys_by_username" + )).get("buckets"); assertThat(buckets.size(), is(3)); assertThat(buckets.get(0).get("doc_count"), is(2)); assertThat(((Map) buckets.get(0).get("key")).get("usernames"), is("test-user-1")); - assertThat(((Map) buckets.get(0).get("not_expired")).get("doc_count"), is(0)); + assertThat( + ((Map) buckets.get(0).get(typedAggs ? "filter#not_expired" : "not_expired")).get("doc_count"), + is(0) + ); assertThat(buckets.get(1).get("doc_count"), is(2)); assertThat(((Map) buckets.get(1).get("key")).get("usernames"), is("test-user-2")); - assertThat(((Map) buckets.get(1).get("not_expired")).get("doc_count"), is(1)); + assertThat( + ((Map) buckets.get(1).get(typedAggs ? "filter#not_expired" : "not_expired")).get("doc_count"), + is(1) + ); assertThat(buckets.get(2).get("doc_count"), is(2)); assertThat(((Map) buckets.get(2).get("key")).get("usernames"), is("test-user-3")); - assertThat(((Map) buckets.get(2).get("not_expired")).get("doc_count"), is(2)); + assertThat( + ((Map) buckets.get(2).get(typedAggs ? 
"filter#not_expired" : "not_expired")).get("doc_count"), + is(2) + ); }); // "creator" field is disallowed { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (typedAggs ? "?typed_keys" : "?typed_keys=false")); request.setOptions( request.getOptions() .toBuilder() @@ -533,7 +556,7 @@ public void testFilterAggs() throws IOException { public void testDisallowedAggTypes() { // global aggregation type MUST never be allowed in order to not expose non-owned non-API key docs { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys=true" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -559,7 +582,7 @@ public void testDisallowedAggTypes() { } // pipeline aggs are not allowed but could be if there's an identified use-case { - Request request = new Request("GET", "/_security/_query/api_key"); + Request request = new Request("GET", "/_security/_query/api_key" + (randomBoolean() ? "?typed_keys=true" : "")); request.setOptions( request.getOptions() .toBuilder() @@ -587,8 +610,11 @@ public void testDisallowedAggTypes() { } } - void assertAggs(String authHeader, String body, Consumer> aggsVerifier) throws IOException { - final Request request = new Request("GET", "/_security/_query/api_key"); + void assertAggs(String authHeader, boolean typedAggs, String body, Consumer> aggsVerifier) throws IOException { + final Request request = new Request( + "GET", + "/_security/_query/api_key" + (typedAggs ? 
randomFrom("?typed_keys", "?typed_keys=true") : randomFrom("", "?typed_keys=false")) + ); request.setJsonEntity(body); request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader)); final Response response = client().performRequest(request); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java index 7e7768212719b..77c2a080dbb57 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.rest.Scope; import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -29,6 +30,7 @@ import java.io.IOException; import java.util.List; +import java.util.Set; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -99,6 +101,11 @@ public String getName() { return "xpack_security_query_api_key"; } + @Override + protected Set responseParams() { + return Set.of(RestSearchAction.TYPED_KEYS_PARAM); + } + @Override protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { final boolean withLimitedBy = request.paramAsBoolean("with_limited_by", false); From 3d53707adc87e8816e7b7e9fb6b1ce075d00e837 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Fri, 29 Mar 2024 22:53:20 +1100 
Subject: [PATCH 19/69] [Doc] Remove invalid watcher ssl enabled settings (#106901) In #99115 we updated the ssl settings template to cover new features. It inadvertently introduced a doc bug that shows invalid ssl.enabled settings for watcher HTTP and email. This PR removes them. --- docs/reference/settings/notification-settings.asciidoc | 2 ++ docs/reference/settings/ssl-settings.asciidoc | 2 ++ 2 files changed, 4 insertions(+) diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc index af6636606630a..4a48c26974084 100644 --- a/docs/reference/settings/notification-settings.asciidoc +++ b/docs/reference/settings/notification-settings.asciidoc @@ -95,6 +95,7 @@ corresponding endpoints are explicitly allowed as well. :verifies: :server!: :ssl-context: watcher +:no-enabled-setting: include::ssl-settings.asciidoc[] @@ -284,6 +285,7 @@ Defaults to `Warning: The attachment [%s] contains characters which spreadsheet :verifies: :server!: :ssl-context: watcher-email +:no-enabled-setting: include::ssl-settings.asciidoc[] diff --git a/docs/reference/settings/ssl-settings.asciidoc b/docs/reference/settings/ssl-settings.asciidoc index 2c0eef7077f4d..2ab32c1a65c2d 100644 --- a/docs/reference/settings/ssl-settings.asciidoc +++ b/docs/reference/settings/ssl-settings.asciidoc @@ -1,6 +1,7 @@ ==== {component} TLS/SSL settings You can configure the following TLS/SSL settings. +ifndef::no-enabled-setting[] +{ssl-prefix}.ssl.enabled+:: (<>) Used to enable or disable TLS/SSL on the {ssl-layer}. @@ -10,6 +11,7 @@ endif::enabled-by-default[] ifndef::enabled-by-default[] The default is `false`.
endif::enabled-by-default[] +endif::no-enabled-setting[] +{ssl-prefix}.ssl.supported_protocols+:: (<>) From 4352791daa4080c2f17275e3429ef833da031e93 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Fri, 29 Mar 2024 14:19:11 +0200 Subject: [PATCH 20/69] Add rolling upgrade test for persistent health node task (#106902) --- .../upgrades/HealthNodeUpgradeIT.java | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java new file mode 100644 index 0000000000000..184343349d317 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.hamcrest.Matchers; + +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { + + public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testHealthNode() throws Exception { + if (clusterHasFeature("health.supports_health")) { + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_cat/tasks")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("health-node")); + }); + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_health_report")); + Map health_report = entityAsMap(response.getEntity()); + assertThat(health_report.get("status"), equalTo("green")); + }); + } + } +} From e59dd0b60efd865f3e182d4da340c38006cf764e Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Fri, 29 Mar 2024 08:40:37 -0600 Subject: [PATCH 21/69] Add total size in bytes to doc stats (#106840) --- docs/changelog/106840.yaml | 6 ++++++ docs/reference/cluster/stats.asciidoc | 7 ++++++- docs/reference/rest-api/common-parms.asciidoc | 2 +- .../main/java/org/elasticsearch/index/shard/DocsStats.java | 2 ++ .../collector/cluster/ClusterStatsMonitoringDocTests.java | 3 ++- 5 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/106840.yaml diff --git a/docs/changelog/106840.yaml b/docs/changelog/106840.yaml new file mode 100644 index 0000000000000..3f6831e4907ca --- /dev/null +++ b/docs/changelog/106840.yaml @@ -0,0 +1,6 @@ +pr: 106840 +summary: Add total size in bytes to doc stats +area: 
Stats +type: enhancement +issues: + - 97670 diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index dad6b46686ea2..6d8a8f748fa0e 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -223,6 +223,10 @@ selected nodes. + This number is based on documents in Lucene segments. {es} reclaims the disk space of deleted Lucene documents when a segment is merged. + +`total_size_in_bytes`:: +(integer) +Total size in bytes across all primary shards assigned to selected nodes. ===== `store`:: @@ -1594,7 +1598,8 @@ The API returns the following response: }, "docs": { "count": 10, - "deleted": 0 + "deleted": 0, + "total_size_in_bytes": 8833 }, "store": { "size": "16.2kb", diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 4d71634f38acf..d0fd4087c275c 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -508,7 +508,7 @@ Return all statistics. <> statistics. `docs`:: -Number of documents and deleted docs, which have not yet merged out. +Number of documents, number of deleted docs which have not yet merged out, and total size in bytes. <> can affect this statistic. 
`fielddata`:: diff --git a/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java b/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java index 0d8fc52cddacf..20a7ffe9c7433 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java +++ b/server/src/main/java/org/elasticsearch/index/shard/DocsStats.java @@ -81,6 +81,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.DOCS); builder.field(Fields.COUNT, count); builder.field(Fields.DELETED, deleted); + builder.field(Fields.TOTAL_SIZE_IN_BYTES, totalSizeInBytes); builder.endObject(); return builder; } @@ -102,5 +103,6 @@ static final class Fields { static final String DOCS = "docs"; static final String COUNT = "count"; static final String DELETED = "deleted"; + static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes"; } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index d88adea7aaef3..cb270c7f19ae8 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -522,7 +522,8 @@ public void testToXContent() throws IOException { }, "docs": { "count": 0, - "deleted": 0 + "deleted": 0, + "total_size_in_bytes": 0 }, "store": { "size_in_bytes": 0, From 1c35baa603e130e76fe17156ffa6558750df8bcc Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 29 Mar 2024 11:52:16 -0400 Subject: [PATCH 22/69] Slightly better geoip databaseType validation (#106889) --- docs/changelog/106889.yaml | 5 +++ .../ingest/geoip/GeoIpProcessor.java | 27 ++++++++------ .../geoip/GeoIpProcessorFactoryTests.java 
| 36 +++++++++++++++++++ 3 files changed, 58 insertions(+), 10 deletions(-) create mode 100644 docs/changelog/106889.yaml diff --git a/docs/changelog/106889.yaml b/docs/changelog/106889.yaml new file mode 100644 index 0000000000000..7755081d09036 --- /dev/null +++ b/docs/changelog/106889.yaml @@ -0,0 +1,5 @@ +pr: 106889 +summary: Slightly better geoip `databaseType` validation +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 6a5fb9007377b..087f0ac9480f8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -175,10 +175,7 @@ private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) t } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); } else { - throw new ElasticsearchParseException( - "Unsupported database type [" + geoIpDatabase.getDatabaseType() + "]", - new IllegalStateException() - ); + throw new ElasticsearchParseException("Unsupported database type [" + databaseType + "]", new IllegalStateException()); } return geoData; } @@ -440,12 +437,24 @@ public Processor create( // pipeline. 
return new DatabaseUnavailableProcessor(processorTag, description, databaseFile); } + final String databaseType; try { databaseType = geoIpDatabase.getDatabaseType(); } finally { geoIpDatabase.release(); } + if (databaseType == null + || (databaseType.endsWith(CITY_DB_SUFFIX) + || databaseType.endsWith(COUNTRY_DB_SUFFIX) + || databaseType.endsWith(ASN_DB_SUFFIX)) == false) { + throw newConfigurationException( + TYPE, + processorTag, + "database_file", + "Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]" + ); + } final Set properties; if (propertyNames != null) { @@ -466,12 +475,8 @@ public Processor create( } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { properties = DEFAULT_ASN_PROPERTIES; } else { - throw newConfigurationException( - TYPE, - processorTag, - "database_file", - "Unsupported database type [" + databaseType + "]" - ); + assert false : "unsupported database type [" + databaseType + "]"; + properties = Set.of(); } } return new GeoIpProcessor( @@ -545,6 +550,8 @@ public static Property parseProperty(String databaseType, String value) { validProperties = ALL_COUNTRY_PROPERTIES; } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { validProperties = ALL_ASN_PROPERTIES; + } else { + assert false : "unsupported database type [" + databaseType + "]"; } try { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index dee9ba3189c26..20e0fa9be6c06 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -50,6 +50,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.anyString; 
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -287,6 +288,41 @@ public void testBuildIllegalFieldOption() throws Exception { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); } + public void testBuildUnsupportedDatabase() throws Exception { + // mock up some unsupported database (it has a databaseType that we don't recognize) + GeoIpDatabase database = mock(GeoIpDatabase.class); + when(database.getDatabaseType()).thenReturn("some-unsupported-database"); + GeoIpDatabaseProvider provider = mock(GeoIpDatabaseProvider.class); + when(provider.getDatabase(anyString())).thenReturn(database); + + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(provider); + + Map config1 = new HashMap<>(); + config1.put("field", "_field"); + config1.put("properties", List.of("ip")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1)); + assertThat( + e.getMessage(), + equalTo("[database_file] Unsupported database type [some-unsupported-database] for file [GeoLite2-City.mmdb]") + ); + } + + public void testBuildNullDatabase() throws Exception { + // mock up a provider that returns a null databaseType + GeoIpDatabase database = mock(GeoIpDatabase.class); + when(database.getDatabaseType()).thenReturn(null); + GeoIpDatabaseProvider provider = mock(GeoIpDatabaseProvider.class); + when(provider.getDatabase(anyString())).thenReturn(database); + + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(provider); + + Map config1 = new HashMap<>(); + config1.put("field", "_field"); + config1.put("properties", List.of("ip")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1)); + assertThat(e.getMessage(), equalTo("[database_file] Unsupported database type [null] for file [GeoLite2-City.mmdb]")); + } + @SuppressWarnings("HiddenField") public void testLazyLoading() throws 
Exception { final Path configDir = createTempDir(); From 00b0c54a7482d13a6b062dc77d92615133cf4364 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 29 Mar 2024 12:24:31 -0400 Subject: [PATCH 23/69] ESQL: Generate docs for the trig functions (#106891) This updates the in-code docs on the trig functions to line up with the docs, removes the docs, and uses the now mostly identical generated docs. This means we only need to document these functions in one place - right next to the code. --- docs/reference/esql/functions/cos.asciidoc | 31 ------------------- .../esql/functions/description/cos.asciidoc | 2 +- .../esql/functions/description/cosh.asciidoc | 2 +- .../esql/functions/description/sin.asciidoc | 2 +- .../esql/functions/description/sinh.asciidoc | 2 +- .../esql/functions/description/tan.asciidoc | 2 +- .../esql/functions/description/tanh.asciidoc | 2 +- .../esql/functions/examples/cos.asciidoc | 13 ++++++++ .../esql/functions/examples/cosh.asciidoc | 13 ++++++++ .../esql/functions/examples/sin.asciidoc | 13 ++++++++ .../esql/functions/examples/sinh.asciidoc | 13 ++++++++ .../esql/functions/examples/tan.asciidoc | 13 ++++++++ .../esql/functions/examples/tanh.asciidoc | 13 ++++++++ .../esql/functions/layout/cos.asciidoc | 1 + .../esql/functions/layout/cosh.asciidoc | 1 + .../esql/functions/layout/sin.asciidoc | 1 + .../esql/functions/layout/sinh.asciidoc | 1 + .../esql/functions/layout/tan.asciidoc | 1 + .../esql/functions/layout/tanh.asciidoc | 1 + .../esql/functions/math-functions.asciidoc | 12 +++---- .../esql/functions/parameters/abs.asciidoc | 2 ++ .../esql/functions/parameters/acos.asciidoc | 2 ++ .../esql/functions/parameters/asin.asciidoc | 2 ++ .../esql/functions/parameters/atan.asciidoc | 2 ++ .../esql/functions/parameters/atan2.asciidoc | 2 ++ .../functions/parameters/auto_bucket.asciidoc | 2 ++ .../esql/functions/parameters/case.asciidoc | 2 ++ .../esql/functions/parameters/ceil.asciidoc | 2 ++ .../functions/parameters/coalesce.asciidoc | 2 ++ 
.../esql/functions/parameters/concat.asciidoc | 2 ++ .../esql/functions/parameters/cos.asciidoc | 6 ++-- .../esql/functions/parameters/cosh.asciidoc | 6 ++-- .../functions/parameters/date_diff.asciidoc | 2 ++ .../parameters/date_extract.asciidoc | 2 ++ .../functions/parameters/date_format.asciidoc | 2 ++ .../functions/parameters/date_parse.asciidoc | 2 ++ .../esql/functions/parameters/e.asciidoc | 2 ++ .../functions/parameters/ends_with.asciidoc | 2 ++ .../esql/functions/parameters/floor.asciidoc | 2 ++ .../functions/parameters/greatest.asciidoc | 2 ++ .../esql/functions/parameters/least.asciidoc | 2 ++ .../esql/functions/parameters/left.asciidoc | 2 ++ .../esql/functions/parameters/length.asciidoc | 2 ++ .../esql/functions/parameters/log.asciidoc | 2 ++ .../esql/functions/parameters/log10.asciidoc | 2 ++ .../esql/functions/parameters/ltrim.asciidoc | 2 ++ .../esql/functions/parameters/mv_avg.asciidoc | 2 ++ .../functions/parameters/mv_concat.asciidoc | 2 ++ .../functions/parameters/mv_count.asciidoc | 2 ++ .../functions/parameters/mv_dedupe.asciidoc | 2 ++ .../functions/parameters/mv_first.asciidoc | 2 ++ .../functions/parameters/mv_last.asciidoc | 2 ++ .../esql/functions/parameters/mv_max.asciidoc | 2 ++ .../functions/parameters/mv_median.asciidoc | 2 ++ .../esql/functions/parameters/mv_min.asciidoc | 2 ++ .../functions/parameters/mv_slice.asciidoc | 2 ++ .../functions/parameters/mv_sort.asciidoc | 2 ++ .../esql/functions/parameters/mv_sum.asciidoc | 2 ++ .../esql/functions/parameters/mv_zip.asciidoc | 2 ++ .../esql/functions/parameters/pi.asciidoc | 2 ++ .../esql/functions/parameters/pow.asciidoc | 2 ++ .../functions/parameters/replace.asciidoc | 2 ++ .../esql/functions/parameters/right.asciidoc | 2 ++ .../esql/functions/parameters/round.asciidoc | 2 ++ .../esql/functions/parameters/rtrim.asciidoc | 2 ++ .../esql/functions/parameters/sin.asciidoc | 6 ++-- .../esql/functions/parameters/sinh.asciidoc | 6 ++-- .../esql/functions/parameters/split.asciidoc | 2 ++ 
.../esql/functions/parameters/sqrt.asciidoc | 2 ++ .../parameters/st_intersects.asciidoc | 2 ++ .../esql/functions/parameters/st_x.asciidoc | 2 ++ .../esql/functions/parameters/st_y.asciidoc | 2 ++ .../functions/parameters/starts_with.asciidoc | 2 ++ .../functions/parameters/substring.asciidoc | 2 ++ .../esql/functions/parameters/tan.asciidoc | 6 ++-- .../esql/functions/parameters/tanh.asciidoc | 6 ++-- .../esql/functions/parameters/tau.asciidoc | 2 ++ .../functions/parameters/to_boolean.asciidoc | 2 ++ .../parameters/to_cartesianpoint.asciidoc | 2 ++ .../parameters/to_cartesianshape.asciidoc | 2 ++ .../functions/parameters/to_datetime.asciidoc | 2 ++ .../functions/parameters/to_degrees.asciidoc | 2 ++ .../functions/parameters/to_double.asciidoc | 2 ++ .../functions/parameters/to_geopoint.asciidoc | 2 ++ .../functions/parameters/to_geoshape.asciidoc | 2 ++ .../functions/parameters/to_integer.asciidoc | 2 ++ .../esql/functions/parameters/to_ip.asciidoc | 2 ++ .../functions/parameters/to_long.asciidoc | 2 ++ .../functions/parameters/to_lower.asciidoc | 2 ++ .../functions/parameters/to_radians.asciidoc | 2 ++ .../functions/parameters/to_string.asciidoc | 2 ++ .../parameters/to_unsigned_long.asciidoc | 2 ++ .../functions/parameters/to_upper.asciidoc | 2 ++ .../functions/parameters/to_version.asciidoc | 2 ++ .../esql/functions/parameters/trim.asciidoc | 2 ++ .../esql/functions/signature/cos.svg | 2 +- .../esql/functions/signature/cosh.svg | 2 +- .../esql/functions/signature/sin.svg | 2 +- .../esql/functions/signature/sinh.svg | 2 +- .../esql/functions/signature/tan.svg | 2 +- .../esql/functions/signature/tanh.svg | 2 +- docs/reference/esql/functions/sin.asciidoc | 31 ------------------- docs/reference/esql/functions/sinh.asciidoc | 30 ------------------ docs/reference/esql/functions/tan.asciidoc | 31 ------------------- docs/reference/esql/functions/tanh.asciidoc | 30 ------------------ .../esql/functions/types/cos.asciidoc | 2 +- .../esql/functions/types/cosh.asciidoc 
| 2 +- .../esql/functions/types/sin.asciidoc | 2 +- .../esql/functions/types/sinh.asciidoc | 2 +- .../esql/functions/types/tan.asciidoc | 2 +- .../esql/functions/types/tanh.asciidoc | 2 +- x-pack/plugin/esql/build.gradle | 2 +- .../src/main/resources/meta.csv-spec | 30 +++++++++--------- .../expression/function/scalar/math/Cos.java | 15 +++++++-- .../expression/function/scalar/math/Cosh.java | 15 ++++++--- .../expression/function/scalar/math/Sin.java | 15 +++++++-- .../expression/function/scalar/math/Sinh.java | 15 ++++++--- .../expression/function/scalar/math/Tan.java | 15 +++++++-- .../expression/function/scalar/math/Tanh.java | 15 ++++++--- .../function/AbstractFunctionTestCase.java | 1 + 120 files changed, 353 insertions(+), 229 deletions(-) delete mode 100644 docs/reference/esql/functions/cos.asciidoc create mode 100644 docs/reference/esql/functions/examples/cos.asciidoc create mode 100644 docs/reference/esql/functions/examples/cosh.asciidoc create mode 100644 docs/reference/esql/functions/examples/sin.asciidoc create mode 100644 docs/reference/esql/functions/examples/sinh.asciidoc create mode 100644 docs/reference/esql/functions/examples/tan.asciidoc create mode 100644 docs/reference/esql/functions/examples/tanh.asciidoc delete mode 100644 docs/reference/esql/functions/sin.asciidoc delete mode 100644 docs/reference/esql/functions/sinh.asciidoc delete mode 100644 docs/reference/esql/functions/tan.asciidoc delete mode 100644 docs/reference/esql/functions/tanh.asciidoc diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc deleted file mode 100644 index 7fa1d973c86b6..0000000000000 --- a/docs/reference/esql/functions/cos.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-cos]] -=== `COS` - -*Syntax* - -[.text-center] -image::esql/functions/signature/cos.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. 
- -*Description* - -Returns the {wikipedia}/Sine_and_cosine[cosine] of `n`. Input expected in -radians. - -include::types/cos.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=cos] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=cos-result] -|=== diff --git a/docs/reference/esql/functions/description/cos.asciidoc b/docs/reference/esql/functions/description/cos.asciidoc index e46d651b34c00..101489faabe1c 100644 --- a/docs/reference/esql/functions/description/cos.asciidoc +++ b/docs/reference/esql/functions/description/cos.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric cosine of an angle +Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. diff --git a/docs/reference/esql/functions/description/cosh.asciidoc b/docs/reference/esql/functions/description/cosh.asciidoc index deaf780addb93..bfe51f9152875 100644 --- a/docs/reference/esql/functions/description/cosh.asciidoc +++ b/docs/reference/esql/functions/description/cosh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic cosine of a number +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle. diff --git a/docs/reference/esql/functions/description/sin.asciidoc b/docs/reference/esql/functions/description/sin.asciidoc index 4a5f04732fccc..ba12ba88ca37a 100644 --- a/docs/reference/esql/functions/description/sin.asciidoc +++ b/docs/reference/esql/functions/description/sin.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric sine of an angle +Returns the {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.
diff --git a/docs/reference/esql/functions/description/sinh.asciidoc b/docs/reference/esql/functions/description/sinh.asciidoc index a51b88c7d446e..bb7761e2a0254 100644 --- a/docs/reference/esql/functions/description/sinh.asciidoc +++ b/docs/reference/esql/functions/description/sinh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic sine of a number +Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle. diff --git a/docs/reference/esql/functions/description/tan.asciidoc b/docs/reference/esql/functions/description/tan.asciidoc index 1f6a4f96f59f1..925bebf044a7b 100644 --- a/docs/reference/esql/functions/description/tan.asciidoc +++ b/docs/reference/esql/functions/description/tan.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the trigonometric tangent of an angle +Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle. diff --git a/docs/reference/esql/functions/description/tanh.asciidoc b/docs/reference/esql/functions/description/tanh.asciidoc index 277783f7f70fe..7ee5e457dfe48 100644 --- a/docs/reference/esql/functions/description/tanh.asciidoc +++ b/docs/reference/esql/functions/description/tanh.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the hyperbolic tangent of a number +Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle. diff --git a/docs/reference/esql/functions/examples/cos.asciidoc b/docs/reference/esql/functions/examples/cos.asciidoc new file mode 100644 index 0000000000000..6d673fb413753 --- /dev/null +++ b/docs/reference/esql/functions/examples/cos.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=cos] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=cos-result] +|=== + diff --git a/docs/reference/esql/functions/examples/cosh.asciidoc b/docs/reference/esql/functions/examples/cosh.asciidoc new file mode 100644 index 0000000000000..bd9a8759f64e4 --- /dev/null +++ b/docs/reference/esql/functions/examples/cosh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=cosh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=cosh-result] +|=== + diff --git a/docs/reference/esql/functions/examples/sin.asciidoc b/docs/reference/esql/functions/examples/sin.asciidoc new file mode 100644 index 0000000000000..33ef89de10c70 --- /dev/null +++ b/docs/reference/esql/functions/examples/sin.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sin] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sin-result] +|=== + diff --git a/docs/reference/esql/functions/examples/sinh.asciidoc b/docs/reference/esql/functions/examples/sinh.asciidoc new file mode 100644 index 0000000000000..84eff26a0068e --- /dev/null +++ b/docs/reference/esql/functions/examples/sinh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=sinh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=sinh-result] +|=== + diff --git a/docs/reference/esql/functions/examples/tan.asciidoc b/docs/reference/esql/functions/examples/tan.asciidoc new file mode 100644 index 0000000000000..a8ad1ae8c2151 --- /dev/null +++ b/docs/reference/esql/functions/examples/tan.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=tan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=tan-result] +|=== + diff --git a/docs/reference/esql/functions/examples/tanh.asciidoc b/docs/reference/esql/functions/examples/tanh.asciidoc new file mode 100644 index 0000000000000..1f1a1d8d38397 --- /dev/null +++ b/docs/reference/esql/functions/examples/tanh.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/floats.csv-spec[tag=tanh] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/floats.csv-spec[tag=tanh-result] +|=== + diff --git a/docs/reference/esql/functions/layout/cos.asciidoc b/docs/reference/esql/functions/layout/cos.asciidoc index 7b97f40529096..1bb3e2544bc6f 100644 --- a/docs/reference/esql/functions/layout/cos.asciidoc +++ b/docs/reference/esql/functions/layout/cos.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/cos.svg[Embedded,opts=inline] include::../parameters/cos.asciidoc[] include::../description/cos.asciidoc[] include::../types/cos.asciidoc[] +include::../examples/cos.asciidoc[] diff --git a/docs/reference/esql/functions/layout/cosh.asciidoc b/docs/reference/esql/functions/layout/cosh.asciidoc index e36a96e0eb324..175b7b23324ce 100644 --- a/docs/reference/esql/functions/layout/cosh.asciidoc +++ b/docs/reference/esql/functions/layout/cosh.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/cosh.svg[Embedded,opts=inline] include::../parameters/cosh.asciidoc[] include::../description/cosh.asciidoc[] include::../types/cosh.asciidoc[] +include::../examples/cosh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sin.asciidoc b/docs/reference/esql/functions/layout/sin.asciidoc index 802045d0a23af..465bbd441f689 100644 --- a/docs/reference/esql/functions/layout/sin.asciidoc +++ b/docs/reference/esql/functions/layout/sin.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/sin.svg[Embedded,opts=inline] include::../parameters/sin.asciidoc[] include::../description/sin.asciidoc[] include::../types/sin.asciidoc[] +include::../examples/sin.asciidoc[] diff --git a/docs/reference/esql/functions/layout/sinh.asciidoc b/docs/reference/esql/functions/layout/sinh.asciidoc index 3ac1f03a608f6..11ae1a8b0403f 100644 --- a/docs/reference/esql/functions/layout/sinh.asciidoc +++ b/docs/reference/esql/functions/layout/sinh.asciidoc @@ 
-12,3 +12,4 @@ image::esql/functions/signature/sinh.svg[Embedded,opts=inline] include::../parameters/sinh.asciidoc[] include::../description/sinh.asciidoc[] include::../types/sinh.asciidoc[] +include::../examples/sinh.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tan.asciidoc b/docs/reference/esql/functions/layout/tan.asciidoc index 056145f5eed44..2723076238228 100644 --- a/docs/reference/esql/functions/layout/tan.asciidoc +++ b/docs/reference/esql/functions/layout/tan.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/tan.svg[Embedded,opts=inline] include::../parameters/tan.asciidoc[] include::../description/tan.asciidoc[] include::../types/tan.asciidoc[] +include::../examples/tan.asciidoc[] diff --git a/docs/reference/esql/functions/layout/tanh.asciidoc b/docs/reference/esql/functions/layout/tanh.asciidoc index 3024ac5fb2aff..338d8574d4949 100644 --- a/docs/reference/esql/functions/layout/tanh.asciidoc +++ b/docs/reference/esql/functions/layout/tanh.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/tanh.svg[Embedded,opts=inline] include::../parameters/tanh.asciidoc[] include::../description/tanh.asciidoc[] include::../types/tanh.asciidoc[] +include::../examples/tanh.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 5faf994d61db6..28830554198d2 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -37,8 +37,8 @@ include::layout/asin.asciidoc[] include::layout/atan.asciidoc[] include::layout/atan2.asciidoc[] include::layout/ceil.asciidoc[] -include::cos.asciidoc[] -include::cosh.asciidoc[] +include::layout/cos.asciidoc[] +include::layout/cosh.asciidoc[] include::e.asciidoc[] include::floor.asciidoc[] include::log.asciidoc[] @@ -46,9 +46,9 @@ include::log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] include::round.asciidoc[] -include::sin.asciidoc[] 
-include::sinh.asciidoc[] +include::layout/sin.asciidoc[] +include::layout/sinh.asciidoc[] include::sqrt.asciidoc[] -include::tan.asciidoc[] -include::tanh.asciidoc[] +include::layout/tan.asciidoc[] +include::layout/tanh.asciidoc[] include::tau.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/abs.asciidoc b/docs/reference/esql/functions/parameters/abs.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/abs.asciidoc +++ b/docs/reference/esql/functions/parameters/abs.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/acos.asciidoc b/docs/reference/esql/functions/parameters/acos.asciidoc index 2d06f7e70333d..d3fd81343a38b 100644 --- a/docs/reference/esql/functions/parameters/acos.asciidoc +++ b/docs/reference/esql/functions/parameters/acos.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/asin.asciidoc b/docs/reference/esql/functions/parameters/asin.asciidoc index 2d06f7e70333d..d3fd81343a38b 100644 --- a/docs/reference/esql/functions/parameters/asin.asciidoc +++ b/docs/reference/esql/functions/parameters/asin.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/atan.asciidoc b/docs/reference/esql/functions/parameters/atan.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/atan.asciidoc +++ b/docs/reference/esql/functions/parameters/atan.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/atan2.asciidoc b/docs/reference/esql/functions/parameters/atan2.asciidoc index 8dc744ad03e6a..eceba6fea4217 100644 --- a/docs/reference/esql/functions/parameters/atan2.asciidoc +++ b/docs/reference/esql/functions/parameters/atan2.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `y_coordinate`:: diff --git a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc index 0f9c6a1b81c99..35228377b58de 100644 --- a/docs/reference/esql/functions/parameters/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/parameters/auto_bucket.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index fb70278c17d1a..c3617b7c0e32c 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `condition`:: diff --git a/docs/reference/esql/functions/parameters/ceil.asciidoc b/docs/reference/esql/functions/parameters/ceil.asciidoc index 8527c7f74bb09..65013f4c21265 100644 --- a/docs/reference/esql/functions/parameters/ceil.asciidoc +++ b/docs/reference/esql/functions/parameters/ceil.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 07c8a84ed5583..9b62a2e7e0d87 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/concat.asciidoc b/docs/reference/esql/functions/parameters/concat.asciidoc index 47a555fbe80c6..f0c9bfa62790c 100644 --- a/docs/reference/esql/functions/parameters/concat.asciidoc +++ b/docs/reference/esql/functions/parameters/concat.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string1`:: diff --git a/docs/reference/esql/functions/parameters/cos.asciidoc b/docs/reference/esql/functions/parameters/cos.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/cos.asciidoc +++ b/docs/reference/esql/functions/parameters/cos.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/cosh.asciidoc b/docs/reference/esql/functions/parameters/cosh.asciidoc index 1535b0feb8424..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/cosh.asciidoc +++ b/docs/reference/esql/functions/parameters/cosh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* -`number`:: -The number who's hyperbolic cosine is to be returned +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/date_diff.asciidoc b/docs/reference/esql/functions/parameters/date_diff.asciidoc index 9a9ef6fb34fba..1dbb32235fcfe 100644 --- a/docs/reference/esql/functions/parameters/date_diff.asciidoc +++ b/docs/reference/esql/functions/parameters/date_diff.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `unit`:: diff --git a/docs/reference/esql/functions/parameters/date_extract.asciidoc b/docs/reference/esql/functions/parameters/date_extract.asciidoc index 170bc40d89ef6..8f42ed240abb5 100644 --- a/docs/reference/esql/functions/parameters/date_extract.asciidoc +++ b/docs/reference/esql/functions/parameters/date_extract.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `datePart`:: diff --git a/docs/reference/esql/functions/parameters/date_format.asciidoc b/docs/reference/esql/functions/parameters/date_format.asciidoc index 7b000418b961c..773cbe1b66be5 100644 --- a/docs/reference/esql/functions/parameters/date_format.asciidoc +++ b/docs/reference/esql/functions/parameters/date_format.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `dateFormat`:: diff --git a/docs/reference/esql/functions/parameters/date_parse.asciidoc b/docs/reference/esql/functions/parameters/date_parse.asciidoc index 30a09e43c5361..cd6d432f67884 100644 --- a/docs/reference/esql/functions/parameters/date_parse.asciidoc +++ b/docs/reference/esql/functions/parameters/date_parse.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + *Parameters* `datePattern`:: diff --git a/docs/reference/esql/functions/parameters/e.asciidoc b/docs/reference/esql/functions/parameters/e.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/e.asciidoc +++ b/docs/reference/esql/functions/parameters/e.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* diff --git a/docs/reference/esql/functions/parameters/ends_with.asciidoc b/docs/reference/esql/functions/parameters/ends_with.asciidoc index 314eec2bf39ea..af3640ae29b2c 100644 --- a/docs/reference/esql/functions/parameters/ends_with.asciidoc +++ b/docs/reference/esql/functions/parameters/ends_with.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/floor.asciidoc b/docs/reference/esql/functions/parameters/floor.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/floor.asciidoc +++ b/docs/reference/esql/functions/parameters/floor.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 55c75eae0de74..83ac29d0bf7c9 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 55c75eae0de74..83ac29d0bf7c9 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `first`:: diff --git a/docs/reference/esql/functions/parameters/left.asciidoc b/docs/reference/esql/functions/parameters/left.asciidoc index b296adfc064be..98f4c226456ef 100644 --- a/docs/reference/esql/functions/parameters/left.asciidoc +++ b/docs/reference/esql/functions/parameters/left.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/length.asciidoc b/docs/reference/esql/functions/parameters/length.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/length.asciidoc +++ b/docs/reference/esql/functions/parameters/length.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/log.asciidoc b/docs/reference/esql/functions/parameters/log.asciidoc index 1d2306c5b215b..3591efb47a9bd 100644 --- a/docs/reference/esql/functions/parameters/log.asciidoc +++ b/docs/reference/esql/functions/parameters/log.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `base`:: diff --git a/docs/reference/esql/functions/parameters/log10.asciidoc b/docs/reference/esql/functions/parameters/log10.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/log10.asciidoc +++ b/docs/reference/esql/functions/parameters/log10.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/ltrim.asciidoc b/docs/reference/esql/functions/parameters/ltrim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/ltrim.asciidoc +++ b/docs/reference/esql/functions/parameters/ltrim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/mv_avg.asciidoc b/docs/reference/esql/functions/parameters/mv_avg.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_avg.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_avg.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_concat.asciidoc b/docs/reference/esql/functions/parameters/mv_concat.asciidoc index 88893478e2b74..8b2c62581d775 100644 --- a/docs/reference/esql/functions/parameters/mv_concat.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_concat.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/mv_count.asciidoc b/docs/reference/esql/functions/parameters/mv_count.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_count.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_count.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_dedupe.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_first.asciidoc b/docs/reference/esql/functions/parameters/mv_first.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_first.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_first.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_last.asciidoc b/docs/reference/esql/functions/parameters/mv_last.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_last.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_last.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_max.asciidoc b/docs/reference/esql/functions/parameters/mv_max.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_max.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_max.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_median.asciidoc b/docs/reference/esql/functions/parameters/mv_median.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_median.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_median.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_min.asciidoc b/docs/reference/esql/functions/parameters/mv_min.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/mv_min.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_min.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_slice.asciidoc b/docs/reference/esql/functions/parameters/mv_slice.asciidoc index cffbfaff95e86..aa40404140e93 100644 --- a/docs/reference/esql/functions/parameters/mv_slice.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_slice.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_sort.asciidoc b/docs/reference/esql/functions/parameters/mv_sort.asciidoc index aee8353cfd416..1ccbf2f0ee0c5 100644 --- a/docs/reference/esql/functions/parameters/mv_sort.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_sort.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/mv_sum.asciidoc b/docs/reference/esql/functions/parameters/mv_sum.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/mv_sum.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_sum.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/mv_zip.asciidoc b/docs/reference/esql/functions/parameters/mv_zip.asciidoc index 09ab5969fe66a..25940864bfdcd 100644 --- a/docs/reference/esql/functions/parameters/mv_zip.asciidoc +++ b/docs/reference/esql/functions/parameters/mv_zip.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string1`:: diff --git a/docs/reference/esql/functions/parameters/pi.asciidoc b/docs/reference/esql/functions/parameters/pi.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/pi.asciidoc +++ b/docs/reference/esql/functions/parameters/pi.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* diff --git a/docs/reference/esql/functions/parameters/pow.asciidoc b/docs/reference/esql/functions/parameters/pow.asciidoc index 77b3dc186dac7..8e94723086e2e 100644 --- a/docs/reference/esql/functions/parameters/pow.asciidoc +++ b/docs/reference/esql/functions/parameters/pow.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `base`:: diff --git a/docs/reference/esql/functions/parameters/replace.asciidoc b/docs/reference/esql/functions/parameters/replace.asciidoc index f8831e5a6b8c5..331c7425e7189 100644 --- a/docs/reference/esql/functions/parameters/replace.asciidoc +++ b/docs/reference/esql/functions/parameters/replace.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/right.asciidoc b/docs/reference/esql/functions/parameters/right.asciidoc index 3ddd7e7c8cd68..1a05aedf542a9 100644 --- a/docs/reference/esql/functions/parameters/right.asciidoc +++ b/docs/reference/esql/functions/parameters/right.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/round.asciidoc b/docs/reference/esql/functions/parameters/round.asciidoc index ef53d9e07eb00..788c99434fd29 100644 --- a/docs/reference/esql/functions/parameters/round.asciidoc +++ b/docs/reference/esql/functions/parameters/round.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/rtrim.asciidoc b/docs/reference/esql/functions/parameters/rtrim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/rtrim.asciidoc +++ b/docs/reference/esql/functions/parameters/rtrim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/sin.asciidoc b/docs/reference/esql/functions/parameters/sin.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/sin.asciidoc +++ b/docs/reference/esql/functions/parameters/sin.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/sinh.asciidoc b/docs/reference/esql/functions/parameters/sinh.asciidoc index d2dfa9701ff89..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/sinh.asciidoc +++ b/docs/reference/esql/functions/parameters/sinh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -The number to return the hyperbolic sine of +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/split.asciidoc b/docs/reference/esql/functions/parameters/split.asciidoc index 7b3c24adae928..2308f59362d98 100644 --- a/docs/reference/esql/functions/parameters/split.asciidoc +++ b/docs/reference/esql/functions/parameters/split.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/sqrt.asciidoc b/docs/reference/esql/functions/parameters/sqrt.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/sqrt.asciidoc +++ b/docs/reference/esql/functions/parameters/sqrt.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index dbc9adf478948..e87a0d0eb94f0 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `geomA`:: diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index d3d26fc981caf..4e8e77dea1f86 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `point`:: diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index d3d26fc981caf..4e8e77dea1f86 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `point`:: diff --git a/docs/reference/esql/functions/parameters/starts_with.asciidoc b/docs/reference/esql/functions/parameters/starts_with.asciidoc index 75558cad04106..93a43b3406856 100644 --- a/docs/reference/esql/functions/parameters/starts_with.asciidoc +++ b/docs/reference/esql/functions/parameters/starts_with.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/substring.asciidoc b/docs/reference/esql/functions/parameters/substring.asciidoc index 19c4e5551185a..df6aa84d85e87 100644 --- a/docs/reference/esql/functions/parameters/substring.asciidoc +++ b/docs/reference/esql/functions/parameters/substring.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `string`:: diff --git a/docs/reference/esql/functions/parameters/tan.asciidoc b/docs/reference/esql/functions/parameters/tan.asciidoc index eceab83443236..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/tan.asciidoc +++ b/docs/reference/esql/functions/parameters/tan.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* -`number`:: -An angle, in radians +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/tanh.asciidoc b/docs/reference/esql/functions/parameters/tanh.asciidoc index 1fc97c3b68f84..a1c3f7edf30ce 100644 --- a/docs/reference/esql/functions/parameters/tanh.asciidoc +++ b/docs/reference/esql/functions/parameters/tanh.asciidoc @@ -1,4 +1,6 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* -`number`:: -The number to return the hyperbolic tangent of +`angle`:: +An angle, in radians. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/tau.asciidoc b/docs/reference/esql/functions/parameters/tau.asciidoc index ddb88c98f7503..25b3c973f1a26 100644 --- a/docs/reference/esql/functions/parameters/tau.asciidoc +++ b/docs/reference/esql/functions/parameters/tau.asciidoc @@ -1 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* diff --git a/docs/reference/esql/functions/parameters/to_boolean.asciidoc b/docs/reference/esql/functions/parameters/to_boolean.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_boolean.asciidoc +++ b/docs/reference/esql/functions/parameters/to_boolean.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianpoint.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_cartesianshape.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_datetime.asciidoc b/docs/reference/esql/functions/parameters/to_datetime.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_datetime.asciidoc +++ b/docs/reference/esql/functions/parameters/to_datetime.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_degrees.asciidoc b/docs/reference/esql/functions/parameters/to_degrees.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/to_degrees.asciidoc +++ b/docs/reference/esql/functions/parameters/to_degrees.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/to_double.asciidoc b/docs/reference/esql/functions/parameters/to_double.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_double.asciidoc +++ b/docs/reference/esql/functions/parameters/to_double.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_geopoint.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geopoint.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_geoshape.asciidoc +++ b/docs/reference/esql/functions/parameters/to_geoshape.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_integer.asciidoc b/docs/reference/esql/functions/parameters/to_integer.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_integer.asciidoc +++ b/docs/reference/esql/functions/parameters/to_integer.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_ip.asciidoc b/docs/reference/esql/functions/parameters/to_ip.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_ip.asciidoc +++ b/docs/reference/esql/functions/parameters/to_ip.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_long.asciidoc b/docs/reference/esql/functions/parameters/to_long.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_long.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_lower.asciidoc b/docs/reference/esql/functions/parameters/to_lower.asciidoc index 4f2e56949be24..e9e9436113786 100644 --- a/docs/reference/esql/functions/parameters/to_lower.asciidoc +++ b/docs/reference/esql/functions/parameters/to_lower.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/to_radians.asciidoc b/docs/reference/esql/functions/parameters/to_radians.asciidoc index 9faa6c1adebe2..91c56709d182a 100644 --- a/docs/reference/esql/functions/parameters/to_radians.asciidoc +++ b/docs/reference/esql/functions/parameters/to_radians.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `number`:: diff --git a/docs/reference/esql/functions/parameters/to_string.asciidoc b/docs/reference/esql/functions/parameters/to_string.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_string.asciidoc +++ b/docs/reference/esql/functions/parameters/to_string.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc +++ b/docs/reference/esql/functions/parameters/to_unsigned_long.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/to_upper.asciidoc b/docs/reference/esql/functions/parameters/to_upper.asciidoc index 4f2e56949be24..e9e9436113786 100644 --- a/docs/reference/esql/functions/parameters/to_upper.asciidoc +++ b/docs/reference/esql/functions/parameters/to_upper.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `str`:: diff --git a/docs/reference/esql/functions/parameters/to_version.asciidoc b/docs/reference/esql/functions/parameters/to_version.asciidoc index 56df4f5138a27..8903aa1a472a3 100644 --- a/docs/reference/esql/functions/parameters/to_version.asciidoc +++ b/docs/reference/esql/functions/parameters/to_version.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + *Parameters* `field`:: diff --git a/docs/reference/esql/functions/parameters/trim.asciidoc b/docs/reference/esql/functions/parameters/trim.asciidoc index 4c3a25283c403..5188a4fd5a1cd 100644 --- a/docs/reference/esql/functions/parameters/trim.asciidoc +++ b/docs/reference/esql/functions/parameters/trim.asciidoc @@ -1,3 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ *Parameters* `string`:: diff --git a/docs/reference/esql/functions/signature/cos.svg b/docs/reference/esql/functions/signature/cos.svg index ff0484a362aef..4f8ddafaec65b 100644 --- a/docs/reference/esql/functions/signature/cos.svg +++ b/docs/reference/esql/functions/signature/cos.svg @@ -1 +1 @@ -COS(number) \ No newline at end of file +COS(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/cosh.svg b/docs/reference/esql/functions/signature/cosh.svg index 9b9eddd3cb808..11b14d922929a 100644 --- a/docs/reference/esql/functions/signature/cosh.svg +++ b/docs/reference/esql/functions/signature/cosh.svg @@ -1 +1 @@ -COSH(number) \ No newline at end of file +COSH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sin.svg b/docs/reference/esql/functions/signature/sin.svg index 2c60f0580f8fb..c111a7611ac9e 100644 --- a/docs/reference/esql/functions/signature/sin.svg +++ b/docs/reference/esql/functions/signature/sin.svg @@ -1 +1 @@ -SIN(number) \ No newline at end of file +SIN(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/sinh.svg b/docs/reference/esql/functions/signature/sinh.svg index 16e7ddb6b6534..0bb4ac31dee30 100644 --- a/docs/reference/esql/functions/signature/sinh.svg +++ b/docs/reference/esql/functions/signature/sinh.svg @@ -1 +1 @@ -SINH(number) \ No newline at end of file +SINH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tan.svg b/docs/reference/esql/functions/signature/tan.svg index c8065b30586cc..f85929a58164b 100644 --- a/docs/reference/esql/functions/signature/tan.svg +++ b/docs/reference/esql/functions/signature/tan.svg @@ -1 +1 @@ -TAN(number) \ No newline at end of file +TAN(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/tanh.svg b/docs/reference/esql/functions/signature/tanh.svg index c2edfe2d6942f..f7b968f8b30c4 100644 --- 
a/docs/reference/esql/functions/signature/tanh.svg +++ b/docs/reference/esql/functions/signature/tanh.svg @@ -1 +1 @@ -TANH(number) \ No newline at end of file +TANH(angle) \ No newline at end of file diff --git a/docs/reference/esql/functions/sin.asciidoc b/docs/reference/esql/functions/sin.asciidoc deleted file mode 100644 index 6034a695c6071..0000000000000 --- a/docs/reference/esql/functions/sin.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-sin]] -=== `SIN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/sin.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Sine_and_cosine[Sine] trigonometric function. Input expected in -radians. - -include::types/sin.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=sin] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=sin-result] -|=== diff --git a/docs/reference/esql/functions/sinh.asciidoc b/docs/reference/esql/functions/sinh.asciidoc deleted file mode 100644 index 0931b9a2b88e1..0000000000000 --- a/docs/reference/esql/functions/sinh.asciidoc +++ /dev/null @@ -1,30 +0,0 @@ -[discrete] -[[esql-sinh]] -=== `SINH` - -*Syntax* - -[.text-center] -image::esql/functions/signature/sinh.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Hyperbolic_functions[Sine] hyperbolic function. 
- -include::types/sinh.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=sinh] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=sinh-result] -|=== diff --git a/docs/reference/esql/functions/tan.asciidoc b/docs/reference/esql/functions/tan.asciidoc deleted file mode 100644 index 3b1c446806733..0000000000000 --- a/docs/reference/esql/functions/tan.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-tan]] -=== `TAN` - -*Syntax* - -[.text-center] -image::esql/functions/signature/tan.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Sine_and_cosine[Tangent] trigonometric function. Input expected in -radians. - -include::types/tan.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=tan] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=tan-result] -|=== diff --git a/docs/reference/esql/functions/tanh.asciidoc b/docs/reference/esql/functions/tanh.asciidoc deleted file mode 100644 index 9b47c68c19cf1..0000000000000 --- a/docs/reference/esql/functions/tanh.asciidoc +++ /dev/null @@ -1,30 +0,0 @@ -[discrete] -[[esql-tanh]] -=== `TANH` - -*Syntax* - -[.text-center] -image::esql/functions/signature/tanh.svg[Embedded,opts=inline] - -*Parameters* - -`n`:: -Numeric expression. If `null`, the function returns `null`. - -*Description* - -{wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function. 
- -include::types/tanh.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/floats.csv-spec[tag=tanh] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/floats.csv-spec[tag=tanh-result] -|=== diff --git a/docs/reference/esql/functions/types/cos.asciidoc b/docs/reference/esql/functions/types/cos.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/cos.asciidoc +++ b/docs/reference/esql/functions/types/cos.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/cosh.asciidoc b/docs/reference/esql/functions/types/cosh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/cosh.asciidoc +++ b/docs/reference/esql/functions/types/cosh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sin.asciidoc b/docs/reference/esql/functions/types/sin.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/sin.asciidoc +++ b/docs/reference/esql/functions/types/sin.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/sinh.asciidoc b/docs/reference/esql/functions/types/sinh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/sinh.asciidoc +++ b/docs/reference/esql/functions/types/sinh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tan.asciidoc 
b/docs/reference/esql/functions/types/tan.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/tan.asciidoc +++ b/docs/reference/esql/functions/types/tan.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/docs/reference/esql/functions/types/tanh.asciidoc b/docs/reference/esql/functions/types/tanh.asciidoc index 7cda278abdb56..d96a34b678531 100644 --- a/docs/reference/esql/functions/types/tanh.asciidoc +++ b/docs/reference/esql/functions/types/tanh.asciidoc @@ -4,7 +4,7 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -number | result +angle | result double | double integer | double long | double diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 3fdfa7835b036..668ecec0e393d 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -79,7 +79,7 @@ tasks.named("test").configure { into "${rootDir}/docs/reference/esql/functions" include '**/*.asciidoc', '**/*.svg' preserve { - include '/*.asciidoc', '**/*.asciidoc', '**/*.svg' + include '/*.asciidoc', '**/*.asciidoc', '**/*.svg', 'README.md' } } } else { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 7d1617b208f34..9f9aeec7e2838 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -15,8 +15,8 @@ ceil |"double|integer|long|unsigned_long ceil(number:double| cidr_match |"boolean cidr_match(ip:ip, blockX...:keyword|text)" |[ip, blockX] |[ip, "keyword|text"] |["", "CIDR block to test the IP against."] |boolean | "Returns true if the provided IP is contained in one of the provided CIDR blocks." 
| [false, false] | true | false coalesce |"boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" |first | "boolean|text|integer|keyword|long" | "Expression to evaluate" |"boolean|text|integer|keyword|long" | "Returns the first of its arguments that is not null. If all arguments are null, it returns `null`." | false | true | false concat |"keyword concat(string1:keyword|text, string2...:keyword|text)" |[string1, string2] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Concatenates two or more strings." | [false, false] | true | false -cos |"double cos(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric cosine of an angle" | false | false | false -cosh |"double cosh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number who's hyperbolic cosine is to be returned" |double | "Returns the hyperbolic cosine of a number" | false | false | false +cos |"double cos(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle." | false | false | false +cosh |"double cosh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle." | false | false | false count |"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Column or literal for which to count the number of values." |long | "Returns the total number (count) of input values." 
| true | false | true count_distinct |"long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" |[field, precision] |["boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, integer"] |["Column or literal for which to count the number of distinct values.", ""] |long | "Returns the approximate number of distinct values." | [false, true] | false | true date_diff |"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)"|[unit, startTimestamp, endTimestamp] |["keyword|text", "date", "date"] |["A valid date unit", "A string representing a start timestamp", "A string representing an end timestamp"] |integer | "Subtract 2 dates and return their difference in multiples of a unit specified in the 1st argument" | [false, false, false] | false | false @@ -59,8 +59,8 @@ replace |"keyword replace(string:keyword|text, regex:keyword|te right |"keyword right(string:keyword|text, length:integer)" |[string, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the right." | [false, false] | false | false round |"double round(number:double, ?decimals:integer)" |[number, decimals] |["double", "integer"] |["The numeric value to round", "The number of decimal places to round to. Defaults to 0."] |double | "Rounds a number to the closest number with the specified number of digits." 
| [false, true] | false | false rtrim |"keyword|text rtrim(string:keyword|text)" |string |"keyword|text" | "" |"keyword|text" |Removes trailing whitespaces from a string.| false | false | false -sin |"double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" |"An angle, in radians" |double |Returns the trigonometric sine of an angle | false | false | false -sinh |"double sinh(number:double|integer|long|unsigned_long)"|number |"double|integer|long|unsigned_long" |"The number to return the hyperbolic sine of" |double | "Returns the hyperbolic sine of a number" | false | false | false +sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle." | false | false | false +sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle." | false | false | false split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." | [false, false] | false | false sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." 
| false | false | true @@ -70,8 +70,8 @@ st_y |"double st_y(point:geo_point|cartesian_point)" starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false substring |"keyword substring(string:keyword|text, start:integer, ?length:integer)" |[string, start, length] |["keyword|text", "integer", "integer"] |["", "", ""] |keyword | "Returns a substring of a string, specified by a start position and an optional length" | [false, false, true]| false | false sum |"long sum(number:double|integer|long)" |number |"double|integer|long" | "" |long | "The sum of a numeric field." | false | false | true -tan |"double tan(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" |double | "Returns the trigonometric tangent of an angle" | false | false | false -tanh |"double tanh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic tangent of" |double | "Returns the hyperbolic tangent of a number" | false | false | false +tan |"double tan(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle." | false | false | false +tanh |"double tanh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." |double | "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle." | false | false | false tau |double tau() | null | null | null |double | "The ratio of a circle’s circumference to its radius." 
| null | false | false to_bool |"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false to_boolean |"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" |field |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false @@ -119,8 +119,8 @@ synopsis:keyword "boolean cidr_match(ip:ip, blockX...:keyword|text)" "boolean|text|integer|keyword|long coalesce(first:boolean|text|integer|keyword|long, ?rest...:boolean|text|integer|keyword|long)" "keyword concat(string1:keyword|text, string2...:keyword|text)" -"double cos(number:double|integer|long|unsigned_long)" -"double cosh(number:double|integer|long|unsigned_long)" +"double cos(angle:double|integer|long|unsigned_long)" +"double cosh(angle:double|integer|long|unsigned_long)" "long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" "long count_distinct(field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|version, ?precision:integer)" "integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" @@ -163,8 +163,8 @@ double pi() "keyword right(string:keyword|text, length:integer)" "double round(number:double, ?decimals:integer)" "keyword|text rtrim(string:keyword|text)" -"double sin(number:double|integer|long|unsigned_long)" -"double sinh(number:double|integer|long|unsigned_long)" +"double sin(angle:double|integer|long|unsigned_long)" +"double sinh(angle:double|integer|long|unsigned_long)" "keyword split(string:keyword|text, delim:keyword|text)" "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" @@ -174,8 +174,8 @@ double pi() "boolean starts_with(str:keyword|text, 
prefix:keyword|text)" "keyword substring(string:keyword|text, start:integer, ?length:integer)" "long sum(number:double|integer|long)" -"double tan(number:double|integer|long|unsigned_long)" -"double tanh(number:double|integer|long|unsigned_long)" +"double tan(angle:double|integer|long|unsigned_long)" +"double tanh(angle:double|integer|long|unsigned_long)" double tau() "boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" "boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" @@ -212,9 +212,9 @@ META FUNCTIONS | WHERE STARTS_WITH(name, "sin") ; - name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -sin | "double sin(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "An angle, in radians" | double | "Returns the trigonometric sine of an angle" | false | false | false -sinh | "double sinh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "The number to return the hyperbolic sine of" | "double" | "Returns the hyperbolic sine of a number" | false | false | false +name:keyword | synopsis:keyword |argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean +sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle." | false | false | false +sinh |"double sinh(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." 
| double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle." | false | false | false ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index e928f1ae2713e..d327956720840 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,12 +22,20 @@ * Cosine trigonometric function. */ public class Cos extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the trigonometric cosine of an angle") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle.", + examples = @Example(file = "floats", tag = "cos") + ) public Cos( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param( + name = "angle", + type = { "double", "integer", "long", "unsigned_long" }, + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index 55250a3ac720f..93170ec4d7540 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,16 +22,20 @@ * Cosine hyperbolic function. */ public class Cosh extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the hyperbolic cosine of a number") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle.", + examples = @Example(file = "floats", tag = "cosh") + ) public Cosh( Source source, @Param( - name = "number", + name = "angle", type = { "double", "integer", "long", "unsigned_long" }, - description = "The number who's hyperbolic cosine is to be returned" - ) Expression n + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index d8b36a3d38856..11cc7bccc2288 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -22,12 +23,20 @@ */ public class Sin extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the trigonometric sine of an angle") + @FunctionInfo( + returnType = "double", + description = "Returns ths {wikipedia}/Sine_and_cosine[Sine] trigonometric function of an angle.", + examples = @Example(file = "floats", tag = "sin") + ) public Sin( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param( + name = "angle", + type = { "double", "integer", "long", "unsigned_long" }, + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index 0c46002f56af6..142f15c8bfbe0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,16 +22,20 @@ * Sine hyperbolic function. */ public class Sinh extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the hyperbolic sine of a number") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle.", + examples = @Example(file = "floats", tag = "sinh") + ) public Sinh( Source source, @Param( - name = "number", + name = "angle", type = { "double", "integer", "long", "unsigned_long" }, - description = "The number to return the hyperbolic sine of" - ) Expression n + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index 002de2ddfc277..3752f986894ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,12 +22,20 @@ * Tangent trigonometric function. */ public class Tan extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the trigonometric tangent of an angle") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle.", + examples = @Example(file = "floats", tag = "tan") + ) public Tan( Source source, - @Param(name = "number", type = { "double", "integer", "long", "unsigned_long" }, description = "An angle, in radians") Expression n + @Param( + name = "angle", + type = { "double", "integer", "long", "unsigned_long" }, + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index 932677ef0b26d..726a269ebedc5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; @@ -21,16 +22,20 @@ * Tangent hyperbolic function. */ public class Tanh extends AbstractTrigonometricFunction { - @FunctionInfo(returnType = "double", description = "Returns the hyperbolic tangent of a number") + @FunctionInfo( + returnType = "double", + description = "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle.", + examples = @Example(file = "floats", tag = "tanh") + ) public Tanh( Source source, @Param( - name = "number", + name = "angle", type = { "double", "integer", "long", "unsigned_long" }, - description = "The number to return the hyperbolic tangent of" - ) Expression n + description = "An angle, in radians. If `null`, the function returns `null`." 
+ ) Expression angle ) { - super(source, n); + super(source, angle); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 78c1c57e07782..56869de1b87ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1149,6 +1149,7 @@ private static void renderTypes(List argNames) throws IOException { private static void renderParametersList(List argNames, List argDescriptions) throws IOException { StringBuilder builder = new StringBuilder(); + builder.append(DOCS_WARNING); builder.append("*Parameters*\n"); for (int a = 0; a < argNames.size(); a++) { builder.append("\n`").append(argNames.get(a)).append("`::\n").append(argDescriptions.get(a)).append('\n'); From adc42ebda2b8a908f2b4d092405ce4d7782b2f4b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 29 Mar 2024 11:20:49 -0700 Subject: [PATCH 24/69] Remove row method from Vector (#106922) We previously introduced the row method for TopN. However, TopN no longer uses this method. We should remove it to prevent potential misuse. 
--- .../java/org/elasticsearch/compute/data/AbstractVector.java | 5 ----- .../src/main/java/org/elasticsearch/compute/data/Vector.java | 3 --- 2 files changed, 8 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index 1e1f8bbf2f8df..240a16c6a28c3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -24,11 +24,6 @@ public final int getPositionCount() { return positionCount; } - @Override - public final Vector getRow(int position) { - return filter(position); - } - @Override public BlockFactory blockFactory() { return blockFactory; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 757e2a5b22145..9461a3e066df3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -28,9 +28,6 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ int getPositionCount(); - // TODO: improve implementation not to waste as much space - Vector getRow(int position); - /** * Creates a new vector that only exposes the positions provided. Materialization of the selected positions is avoided. * @param positions the positions to retain From 2c1e45a0d6e4418b1e643ca84abaa6dd668c8626 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 29 Mar 2024 12:00:49 -0700 Subject: [PATCH 25/69] Fix downsample action request serialization (#106919) This issue exists in 8.10. 
Closes #106917 --- docs/changelog/106919.yaml | 6 ++++ .../action/downsample/DownsampleAction.java | 16 +++++----- .../test/downsample/10_basic.yml | 31 +++++-------------- 3 files changed, 22 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/106919.yaml diff --git a/docs/changelog/106919.yaml b/docs/changelog/106919.yaml new file mode 100644 index 0000000000000..d8288095590de --- /dev/null +++ b/docs/changelog/106919.yaml @@ -0,0 +1,6 @@ +pr: 106919 +summary: Fix downsample action request serialization +area: Downsampling +type: bug +issues: + - 106917 diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index 08d315fe39ce5..e8e299c58d2eb 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -62,9 +62,11 @@ public Request(StreamInput in) throws IOException { super(in); sourceIndex = in.readString(); targetIndex = in.readString(); - waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) - ? TimeValue.parseTimeValue(in.readString(), "timeout") - : DEFAULT_WAIT_TIMEOUT; + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { + waitTimeout = TimeValue.parseTimeValue(in.readString(), "timeout"); + } else { + waitTimeout = DEFAULT_WAIT_TIMEOUT; + } downsampleConfig = new DownsampleConfig(in); } @@ -88,11 +90,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sourceIndex); out.writeString(targetIndex); - out.writeString( - out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) - ? 
waitTimeout.getStringRep() - : DEFAULT_WAIT_TIMEOUT.getStringRep() - ); + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { + out.writeString(waitTimeout.getStringRep()); + } downsampleConfig.writeTo(out); } diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 265f97e73c234..95c69efa5b36d 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -46,17 +46,9 @@ setup: multi-counter: type: long time_series_metric: counter - scaled-counter: - type: scaled_float - scaling_factor: 100 - time_series_metric: counter multi-gauge: type: integer time_series_metric: gauge - scaled-gauge: - type: scaled_float - scaling_factor: 100 - time_series_metric: gauge network: properties: tx: @@ -71,21 +63,21 @@ setup: index: test body: - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "scaled-counter": 10.0, "multi-gauge": [100, 200, 150], "scaled-gauge": 100.0, "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "multi-gauge": [100, 200, 150], "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", 
"prod"], "values": [2, 3, 6]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "scaled-counter": 20.0, "multi-gauge": [90, 91, 95], "scaled-gauge": 90.0, "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "multi-gauge": [90, 91, 95], "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "scaled-counter": 1.0, "multi-gauge": [103, 110, 109], "scaled-gauge": 104.0, "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "multi-gauge": [103, 110, 109], "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", 
"uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "scaled-counter": 100.0, "multi-gauge": [100, 100, 100], "scaled-gauge": 102.0, "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "multi-gauge": [100, 100, 100], "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "scaled-counter": 7.0, "multi-gauge": [100, 100, 102], "scaled-gauge": 100.0, "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "multi-gauge": [100, 100, 102], "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "scaled-counter": 0.0, "multi-gauge": [101, 102, 102], "scaled-gauge": 101.0, "network": {"tx": 
1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "multi-gauge": [101, 102, 102], "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "scaled-counter": 1000.0, "multi-gauge": [99, 100, 110], "scaled-gauge": 99.0, "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "multi-gauge": [99, 100, 110], "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "scaled-counter": 70.0, "multi-gauge": [95, 98, 100], "scaled-gauge": 95.0, "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", 
"us-west1"], "values": [3, 2, 1]}}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "multi-gauge": [95, 98, 100], "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' - do: indices.put_settings: @@ -141,12 +133,5 @@ setup: - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } - - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } From cf9a333d3a4a470af4a219df17762fd611757a9f Mon Sep 17 00:00:00 2001 
From: Mark Vieira Date: Fri, 29 Mar 2024 12:44:31 -0700 Subject: [PATCH 26/69] Remove unused code from example plugin (#106928) --- .../customsuggester/CustomSuggestion.java | 50 +------------------ 1 file changed, 2 insertions(+), 48 deletions(-) diff --git a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java index 402c4c6ef7920..afffd8266c11e 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java +++ b/plugins/examples/custom-suggester/src/main/java/org/elasticsearch/example/customsuggester/CustomSuggestion.java @@ -8,20 +8,15 @@ package org.elasticsearch.example.customsuggester; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.text.Text; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - public class CustomSuggestion extends Suggest.Suggestion { public static final ParseField DUMMY = new ParseField("dummy"); @@ -64,26 +59,8 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } - public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException { - CustomSuggestion suggestion = new CustomSuggestion(name, -1, null); - parseEntries(parser, suggestion, Entry::fromXContent); - return suggestion; - } - 
public static class Entry extends Suggest.Suggestion.Entry { - private static final ObjectParser PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new); - - static { - declareCommonFields(PARSER); - PARSER.declareString((entry, dummy) -> entry.dummy = dummy, DUMMY); - /* - * The use of a lambda expression instead of the method reference Entry::addOptions is a workaround for a JDK 14 compiler bug. - * The bug is: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8242214 - */ - PARSER.declareObjectArray((e, o) -> e.addOptions(o), (p, c) -> Option.fromXContent(p), new ParseField(OPTIONS)); - } - private String dummy; public Entry() {} @@ -131,27 +108,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public static Entry fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - public static class Option extends Suggest.Suggestion.Entry.Option { - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "CustomSuggestionObjectParser", true, - args -> { - Text text = new Text((String) args[0]); - float score = (float) args[1]; - String dummy = (String) args[2]; - return new Option(text, score, dummy); - }); - - static { - PARSER.declareString(constructorArg(), TEXT); - PARSER.declareFloat(constructorArg(), SCORE); - PARSER.declareString(constructorArg(), DUMMY); - } - private String dummy; public Option(Text text, float score, String dummy) { @@ -192,10 +150,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(DUMMY.getPreferredName(), dummy); return builder; } - - public static Option fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } } } } From 6b419c13d318a14d33fdfdc9f209654db4035bcc Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Fri, 29 Mar 2024 15:53:16 -0400 Subject: [PATCH 27/69] TransportNodesAction supports async node response (#106733) Gives 
children classes of TransportNodesAction the option to access to the request listener so as to respond asynchronously to incoming node requests. Relates ES-6685 --- .../support/nodes/TransportNodesAction.java | 28 +++++++++++++------ .../cluster/routing/ShardRouting.java | 2 ++ 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 48036239793c0..daf3334dcaf65 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.CancellableFanOut; +import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.cluster.ClusterState; @@ -216,11 +217,24 @@ protected void newResponseAsync( protected abstract NodeResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException; + /** + * Implements the request recipient logic. + * If access to the request listener is needed, override {@link #nodeOperationAsync(TransportRequest, Task, ActionListener)}. + */ protected abstract NodeResponse nodeOperation(NodeRequest request, Task task); /** - * resolve node ids to concrete nodes of the incoming request - **/ + * This method can be overridden if a subclass needs to access to a listener in order to asynchronously respond to the node request. + * The default implementation is to fall through to {@link #nodeOperation}. 
+ */ + protected void nodeOperationAsync(NodeRequest request, Task task, ActionListener listener) { + ActionListener.respondAndRelease(listener, nodeOperation(request, task)); + } + + /** + * Resolves node ids to concrete nodes of the incoming request. + * NB: if the request's nodeIds() returns nothing, then the request will be sent to ALL known nodes in the cluster. + */ protected void resolveRequest(NodesRequest request, ClusterState clusterState) { assert request.concreteNodes() == null : "request concreteNodes shouldn't be set"; String[] nodesIds = clusterState.nodes().resolveNodes(request.nodesIds()); @@ -230,12 +244,10 @@ protected void resolveRequest(NodesRequest request, ClusterState clusterState) { class NodeTransportHandler implements TransportRequestHandler { @Override public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception { - final var nodeResponse = nodeOperation(request, task); - try { - channel.sendResponse(nodeResponse); - } finally { - nodeResponse.decRef(); - } + ActionListener.run( + new ChannelActionListener(channel), + channelListener -> nodeOperationAsync(request, task, channelListener) + ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index cd4a929052a62..95882e26773e5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -33,6 +33,8 @@ /** * {@link ShardRouting} immutably encapsulates information about shard * indexRoutings like id, state, version, etc. + * + * Information about a particular shard instance. 
*/ public final class ShardRouting implements Writeable, ToXContentObject { From f0fa7158868a86c65df540b15e2c081394ff77e2 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 29 Mar 2024 22:21:54 +0100 Subject: [PATCH 28/69] Do not retain whitelist statically in painless plugin (#106913) We only need this single item list as well as the whitelist map once when setting up the script engine. Not holding on to them statically saves about 1.4M of heap. => don't hold on to the list statically => null out the map that we need across method calls (and fix a potential concurrency issue with the existing solution ...). --- .../painless/PainlessPlugin.java | 124 ++++++++++-------- .../elasticsearch/painless/AliasTests.java | 2 +- .../painless/AugmentationTests.java | 2 +- .../painless/BaseClassTests.java | 54 ++++---- .../painless/BasicStatementTests.java | 2 +- .../elasticsearch/painless/BindingsTests.java | 2 +- .../elasticsearch/painless/DebugTests.java | 2 +- .../org/elasticsearch/painless/Debugger.java | 6 +- .../painless/DefBootstrapTests.java | 4 +- .../painless/DynamicTypeTests.java | 2 +- .../org/elasticsearch/painless/EmitTests.java | 2 +- .../elasticsearch/painless/FactoryTests.java | 16 +-- .../painless/NeedsScoreTests.java | 3 +- .../painless/ScriptTestCase.java | 5 +- .../ScriptedMetricAggContextsTests.java | 8 +- .../painless/SimilarityScriptTests.java | 4 +- .../org/elasticsearch/painless/ThisTests.java | 2 +- 17 files changed, 129 insertions(+), 111 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 068821793e44c..c37fe8866fec8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.painless.action.PainlessContextAction; import org.elasticsearch.painless.action.PainlessExecuteAction; @@ -52,71 +53,50 @@ */ public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin { - private static final Map, List> whitelists; - private static final String[] BASE_WHITELIST_FILES = new String[] { - "org.elasticsearch.txt", - "org.elasticsearch.net.txt", - "org.elasticsearch.script.fields.txt", - "java.lang.txt", - "java.math.txt", - "java.text.txt", - "java.time.txt", - "java.time.chrono.txt", - "java.time.format.txt", - "java.time.temporal.txt", - "java.time.zone.txt", - "java.util.txt", - "java.util.function.txt", - "java.util.regex.txt", - "java.util.stream.txt", - "java.nio.txt" }; - public static final List BASE_WHITELISTS = Collections.singletonList( - WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, BASE_WHITELIST_FILES) - ); - - /* - * Contexts from Core that need custom whitelists can add them to the map below. - * Whitelist resources should be added as appropriately named, separate files - * under Painless' resources - */ - static { - whitelists = new HashMap<>(); + private volatile Map, List> whitelists; - for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { - List contextWhitelists = new ArrayList<>(); - if (PainlessPlugin.class.getResource("org.elasticsearch.script." + context.name.replace('-', '_') + ".txt") != null) { - contextWhitelists.add( - WhitelistLoader.loadFromResourceFiles( - PainlessPlugin.class, - "org.elasticsearch.script." 
+ context.name.replace('-', '_') + ".txt" - ) - ); - } - - whitelists.put(context, contextWhitelists); - } + private final SetOnce painlessScriptEngine = new SetOnce<>(); - List testWhitelists = new ArrayList<>(); - for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { - if (ScriptModule.RUNTIME_FIELDS_CONTEXTS.contains(context) == false) { - testWhitelists.addAll(whitelists.get(context)); - } - } - testWhitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.json.txt")); - whitelists.put(PainlessTestScript.CONTEXT, testWhitelists); + public static List baseWhiteList() { + return List.of( + WhitelistLoader.loadFromResourceFiles( + PainlessPlugin.class, + WhitelistAnnotationParser.BASE_ANNOTATION_PARSERS, + "org.elasticsearch.txt", + "org.elasticsearch.net.txt", + "org.elasticsearch.script.fields.txt", + "java.lang.txt", + "java.math.txt", + "java.text.txt", + "java.time.txt", + "java.time.chrono.txt", + "java.time.format.txt", + "java.time.temporal.txt", + "java.time.zone.txt", + "java.util.txt", + "java.util.function.txt", + "java.util.regex.txt", + "java.util.stream.txt", + "java.nio.txt" + ) + ); } - private final SetOnce painlessScriptEngine = new SetOnce<>(); - @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + final var wl = whitelists; + whitelists = null; + assert wl != null; Map, List> contextsWithWhitelists = new HashMap<>(); + final List baseWhiteList = baseWhiteList(); for (ScriptContext context : contexts) { // we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI - List mergedWhitelists = new ArrayList<>(BASE_WHITELISTS); - List contextWhitelists = whitelists.get(context); - if (contextWhitelists != null) { - mergedWhitelists.addAll(contextWhitelists); + List contextWhitelists = wl.get(context); + final List mergedWhitelists; + if (contextWhitelists != null && contextWhitelists.isEmpty() == false) { + 
mergedWhitelists = CollectionUtils.concatLists(baseWhiteList, contextWhitelists); + } else { + mergedWhitelists = baseWhiteList; } contextsWithWhitelists.put(context, mergedWhitelists); } @@ -138,13 +118,43 @@ public List> getSettings() { @Override public void loadExtensions(ExtensionLoader loader) { + final Map, List> whitelistsBuilder = new HashMap<>(); + /* + * Contexts from Core that need custom whitelists can add them to the map below. + * Whitelist resources should be added as appropriately named, separate files + * under Painless' resources + */ + for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { + List contextWhitelists = new ArrayList<>(); + if (PainlessPlugin.class.getResource("org.elasticsearch.script." + context.name.replace('-', '_') + ".txt") != null) { + contextWhitelists.add( + WhitelistLoader.loadFromResourceFiles( + PainlessPlugin.class, + "org.elasticsearch.script." + context.name.replace('-', '_') + ".txt" + ) + ); + } + + whitelistsBuilder.put(context, contextWhitelists); + } + + List testWhitelists = new ArrayList<>(); + for (ScriptContext context : ScriptModule.CORE_CONTEXTS.values()) { + if (ScriptModule.RUNTIME_FIELDS_CONTEXTS.contains(context) == false) { + testWhitelists.addAll(whitelistsBuilder.get(context)); + } + } + testWhitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.json.txt")); + whitelistsBuilder.put(PainlessTestScript.CONTEXT, testWhitelists); loader.loadExtensions(PainlessExtension.class) .stream() .flatMap(extension -> extension.getContextWhitelists().entrySet().stream()) .forEach(entry -> { - List existing = whitelists.computeIfAbsent(entry.getKey(), c -> new ArrayList<>()); + List existing = whitelistsBuilder.computeIfAbsent(entry.getKey(), c -> new ArrayList<>()); existing.addAll(entry.getValue()); }); + + this.whitelists = whitelistsBuilder; } @Override diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java index 2ccc70685e6d7..1d74de8549435 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java @@ -24,7 +24,7 @@ public class AliasTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias")); contexts.put(PainlessTestScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java index e97bd1bb123ca..6d951299b80c6 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AugmentationTests.java @@ -25,7 +25,7 @@ public class AugmentationTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - List digestWhitelist = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List digestWhitelist = new ArrayList<>(PAINLESS_BASE_WHITELIST); digestWhitelist.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.script.ingest.txt")); contexts.put(DigestTestScript.CONTEXT, digestWhitelist); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java index 685080c8d90f6..1acaa83493ee2 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java @@ -28,27 +28,27 @@ public class BaseClassTests extends ScriptTestCase { protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(Gets.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(NoArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(OneArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(PrimitiveArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DefArrayArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ManyArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(VarArgs.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DefaultMethods.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsVoid.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveBoolean.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveInt.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveFloat.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ReturnsPrimitiveDouble.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(NoArgsConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(WrongArgsConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(WrongLengthOfArgConstant.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownArgType.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownReturnType.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(UnknownArgTypeInArray.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(TwoExecuteMethods.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(Gets.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(NoArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(OneArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + 
contexts.put(PrimitiveArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(DefArrayArg.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ManyArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(VarArgs.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(DefaultMethods.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsVoid.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveBoolean.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveInt.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveFloat.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ReturnsPrimitiveDouble.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(NoArgsConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(WrongArgsConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(WrongLengthOfArgConstant.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownArgType.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownReturnType.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(UnknownArgTypeInArray.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(TwoExecuteMethods.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } @@ -138,7 +138,7 @@ public void testNoArgs() throws Exception { ); assertEquals("cannot resolve symbol [_score]", e.getMessage()); - String debug = Debugger.toString(NoArgs.class, "int i = 0", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(NoArgs.class, "int i = 0", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ACONST_NULL")); assertThat(debug, containsString("ARETURN")); } @@ -377,7 +377,7 @@ public void testReturnsVoid() throws Exception { scriptEngine.compile("testReturnsVoid1", "map.remove('a')", ReturnsVoid.CONTEXT, emptyMap()).newInstance().execute(map); assertEquals(emptyMap(), map); - String debug = Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = 
Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings(), PAINLESS_BASE_WHITELIST); // The important thing is that this contains the opcode for returning void assertThat(debug, containsString(" RETURN")); // We shouldn't contain any weird "default to null" logic @@ -434,7 +434,7 @@ public void testReturnsPrimitiveBoolean() throws Exception { .execute() ); - String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ICONST_0")); // The important thing here is that we have the bytecode for returning an integer instead of an object. booleans are integers. assertThat(debug, containsString("IRETURN")); @@ -540,7 +540,7 @@ public void testReturnsPrimitiveInt() throws Exception { scriptEngine.compile("testReturnsPrimitiveInt7", "1 + 1", ReturnsPrimitiveInt.CONTEXT, emptyMap()).newInstance().execute() ); - String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("ICONST_1")); // The important thing here is that we have the bytecode for returning an integer instead of an object assertThat(debug, containsString("IRETURN")); @@ -656,7 +656,7 @@ public void testReturnsPrimitiveFloat() throws Exception { ).newInstance().execute() ); - String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings(), PAINLESS_BASE_WHITELIST); assertThat(debug, containsString("FCONST_1")); // The important thing here is that we have the bytecode for returning a float instead of an object 
assertThat(debug, containsString("FRETURN")); @@ -775,7 +775,7 @@ public void testReturnsPrimitiveDouble() throws Exception { 0 ); - String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings(), PAINLESS_BASE_WHITELIST); // The important thing here is that we have the bytecode for returning a double instead of an object assertThat(debug, containsString("DRETURN")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java index e264085371701..14f30a895911b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicStatementTests.java @@ -24,7 +24,7 @@ public class BasicStatementTests extends ScriptTestCase { protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - contexts.put(OneArg.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(OneArg.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java index 90c517fbdce2a..a751e9c82ec2a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java @@ -114,7 +114,7 @@ public interface Factory { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, 
"org.elasticsearch.painless.test")); InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java index 7b0b3b500d12c..3d539f7e3cb85 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java @@ -28,7 +28,7 @@ public class DebugTests extends ScriptTestCase { private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists( - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, new HashMap<>(), new HashMap<>() ); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index b44be595b4178..b60d8a0fd7ce8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -22,12 +22,14 @@ import java.util.HashMap; import java.util.List; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; + /** quick and dirty tools for debugging */ final class Debugger { /** compiles source to bytecode, and returns debugging output */ static String toString(final String source) { - return toString(PainlessTestScript.class, source, new CompilerSettings(), PainlessPlugin.BASE_WHITELISTS); + return toString(PainlessTestScript.class, source, new CompilerSettings(), PAINLESS_BASE_WHITELIST); } /** compiles to bytecode, and returns debugging output */ @@ -84,7 +86,7 @@ static void phases( PainlessTestScript.class, source, new CompilerSettings(), - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, semanticPhaseVisitor, irPhaseVisitor, asmPhaseVisitor diff --git 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index ed7fef33302bb..9ad29bbe84f7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -21,9 +21,11 @@ import java.util.Collections; import java.util.HashMap; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; + public class DefBootstrapTests extends ESTestCase { private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists( - PainlessPlugin.BASE_WHITELISTS, + PAINLESS_BASE_WHITELIST, new HashMap<>(), new HashMap<>() ); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java index ffbb7a17137d9..e7f370d4b8a7f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DynamicTypeTests.java @@ -23,7 +23,7 @@ public class DynamicTypeTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.dynamic")); contexts.put(PainlessTestScript.CONTEXT, whitelists); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java index 344d4aaa822dc..eb1626af454b7 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EmitTests.java @@ -21,7 +21,7 @@ public class EmitTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); contexts.put(TestFieldScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java index eb1a665327258..8d15e0b70d299 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FactoryTests.java @@ -23,14 +23,14 @@ public class FactoryTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - contexts.put(StatefulFactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(DeterministicFactoryTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(EmptyTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(TemplateScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(VoidReturnTestScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestConverterScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(FactoryTestConverterScriptBadDef.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(StatefulFactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + 
contexts.put(DeterministicFactoryTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(EmptyTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(TemplateScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(VoidReturnTestScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestConverterScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(FactoryTestConverterScriptBadDef.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 33a66c7564df4..96181458bd496 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -22,6 +22,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.elasticsearch.painless.ScriptTestCase.PAINLESS_BASE_WHITELIST; /** * Test that needsScores() is reported correctly depending on whether _score is used @@ -33,7 +34,7 @@ public void testNeedsScores() { IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); Map, List> contexts = new HashMap<>(); - contexts.put(NumberSortScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(NumberSortScript.CONTEXT, PAINLESS_BASE_WHITELIST); PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts); SearchExecutionContext searchExecutionContext = index.newSearchExecutionContext(0, 0, null, () -> 0, null, emptyMap()); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 7d9a54b71d5c7..dbcb4e791cdd8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -34,6 +34,9 @@ * Typically just asserts the output of {@code exec()} */ public abstract class ScriptTestCase extends ESTestCase { + + protected static final List PAINLESS_BASE_WHITELIST = PainlessPlugin.baseWhiteList(); + protected PainlessScriptEngine scriptEngine; @Before @@ -53,7 +56,7 @@ protected Settings scriptEngineSettings() { */ protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.test")); contexts.put(PainlessTestScript.CONTEXT, whitelists); return contexts; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 3aee371bec453..2d3f09fc7243a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -34,10 +34,10 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, 
PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 6ad7622b35cac..dbfd1327fb998 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -42,8 +42,8 @@ public class SimilarityScriptTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(SimilarityScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); - contexts.put(SimilarityWeightScript.CONTEXT, PainlessPlugin.BASE_WHITELISTS); + contexts.put(SimilarityScript.CONTEXT, PAINLESS_BASE_WHITELIST); + contexts.put(SimilarityWeightScript.CONTEXT, PAINLESS_BASE_WHITELIST); return contexts; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java index 407b36caf1924..cfa62ea33c0ea 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ThisTests.java @@ -76,7 +76,7 @@ public interface Factory { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - List whitelists = new ArrayList<>(PainlessPlugin.BASE_WHITELISTS); + List whitelists = new ArrayList<>(PAINLESS_BASE_WHITELIST); whitelists.add(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.this")); contexts.put(ThisScript.CONTEXT, whitelists); return contexts; From 4923f96828611b6c8372384aa13b1282bea761e6 
Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 29 Mar 2024 17:22:31 -0400 Subject: [PATCH 29/69] Tidy up the geoip Property enum (#106930) --- .../ingest/geoip/GeoIpProcessor.java | 124 ++++++++++-------- .../geoip/GeoIpProcessorFactoryTests.java | 17 +-- .../ingest/geoip/GeoIpProcessorTests.java | 47 +++---- 3 files changed, 101 insertions(+), 87 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 087f0ac9480f8..18ca9599f183c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Assertions; +import org.elasticsearch.core.Nullable; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; @@ -33,8 +34,8 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; -import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -381,23 +382,21 @@ public GeoIpDatabase get() throws IOException { } public static final class Factory implements Processor.Factory { - static final Set DEFAULT_CITY_PROPERTIES = Set.copyOf( - EnumSet.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.LOCATION - ) - ); - static final Set DEFAULT_COUNTRY_PROPERTIES = Set.copyOf( - EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE) + static final Set DEFAULT_CITY_PROPERTIES = Set.of( + 
Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.LOCATION ); - static final Set DEFAULT_ASN_PROPERTIES = Set.copyOf( - EnumSet.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK) + static final Set DEFAULT_COUNTRY_PROPERTIES = Set.of( + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE ); + static final Set DEFAULT_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); private final GeoIpDatabaseProvider geoIpDatabaseProvider; @@ -457,27 +456,10 @@ public Processor create( } final Set properties; - if (propertyNames != null) { - Set modifiableProperties = EnumSet.noneOf(Property.class); - for (String fieldName : propertyNames) { - try { - modifiableProperties.add(Property.parseProperty(databaseType, fieldName)); - } catch (IllegalArgumentException e) { - throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); - } - } - properties = Set.copyOf(modifiableProperties); - } else { - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - properties = DEFAULT_CITY_PROPERTIES; - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - properties = DEFAULT_COUNTRY_PROPERTIES; - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - properties = DEFAULT_ASN_PROPERTIES; - } else { - assert false : "unsupported database type [" + databaseType + "]"; - properties = Set.of(); - } + try { + properties = Property.parseProperties(databaseType, propertyNames); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } return new GeoIpProcessor( processorTag, @@ -518,7 +500,7 @@ enum Property { ORGANIZATION_NAME, NETWORK; - static final EnumSet ALL_CITY_PROPERTIES = EnumSet.of( + static final Set ALL_CITY_PROPERTIES = Set.of( Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, @@ 
-529,42 +511,70 @@ enum Property { Property.TIMEZONE, Property.LOCATION ); - static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of( + static final Set ALL_COUNTRY_PROPERTIES = Set.of( Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE ); - static final EnumSet ALL_ASN_PROPERTIES = EnumSet.of( - Property.IP, - Property.ASN, - Property.ORGANIZATION_NAME, - Property.NETWORK - ); + static final Set ALL_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); + + private static Property parseProperty(Set validProperties, String value) { + try { + Property property = valueOf(value.toUpperCase(Locale.ROOT)); + if (validProperties.contains(property) == false) { + throw new IllegalArgumentException("invalid"); + } + return property; + } catch (IllegalArgumentException e) { + // put the properties in natural order before throwing so that we have reliable error messages -- this is a little + // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial + Property[] properties = validProperties.toArray(new Property[0]); + Arrays.sort(properties); + throw new IllegalArgumentException( + "illegal property value [" + value + "]. valid values are " + Arrays.toString(properties) + ); + } + } + + /** + * Parse the given list of property names and validate them against the supplied databaseType. 
+ * + * @param databaseType the type of database to use to validate property names + * @param propertyNames a list of property names to parse, or null to use the default properties for the associated databaseType + * @throws IllegalArgumentException if any of the property names are not valid, or if the databaseType is not valid + * @return a set of parsed and validated properties + */ + public static Set parseProperties(final String databaseType, @Nullable final List propertyNames) { + final Set validProperties; + final Set defaultProperties; - public static Property parseProperty(String databaseType, String value) { - Set validProperties = EnumSet.noneOf(Property.class); if (databaseType.endsWith(CITY_DB_SUFFIX)) { validProperties = ALL_CITY_PROPERTIES; + defaultProperties = Factory.DEFAULT_CITY_PROPERTIES; } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { validProperties = ALL_COUNTRY_PROPERTIES; + defaultProperties = Factory.DEFAULT_COUNTRY_PROPERTIES; } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { validProperties = ALL_ASN_PROPERTIES; + defaultProperties = Factory.DEFAULT_ASN_PROPERTIES; } else { - assert false : "unsupported database type [" + databaseType + "]"; + assert false : "Unsupported database type [" + databaseType + "]"; + throw new IllegalArgumentException("Unsupported database type [" + databaseType + "]"); } - try { - Property property = valueOf(value.toUpperCase(Locale.ROOT)); - if (validProperties.contains(property) == false) { - throw new IllegalArgumentException("invalid"); + final Set properties; + if (propertyNames != null) { + Set modifiableProperties = new HashSet<>(); + for (String propertyName : propertyNames) { + modifiableProperties.add(parseProperty(validProperties, propertyName)); // n.b. this throws if a property is invalid } - return property; - } catch (IllegalArgumentException e) { - throw new IllegalArgumentException( - "illegal property value [" + value + "]. 
valid values are " + Arrays.toString(validProperties.toArray()) - ); + properties = Set.copyOf(modifiableProperties); + } else { + // if propertyNames is null, then use the default properties for the databaseType + properties = defaultProperties; } + return properties; } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 20e0fa9be6c06..24f2df7e30d16 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -37,8 +38,8 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; -import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -185,8 +186,8 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); - EnumSet asnOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_ASN_PROPERTIES); - asnOnlyProperties.remove(GeoIpProcessor.Property.IP); + Set asnOnlyProperties = new HashSet<>(Property.ALL_ASN_PROPERTIES); + asnOnlyProperties.remove(Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); config.put("properties", List.of(asnProperty)); Exception e = 
expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -205,8 +206,8 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-ASN.mmdb"); - EnumSet cityOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_CITY_PROPERTIES); - cityOnlyProperties.remove(GeoIpProcessor.Property.IP); + Set cityOnlyProperties = new HashSet<>(Property.ALL_CITY_PROPERTIES); + cityOnlyProperties.remove(Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", List.of(cityProperty)); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); @@ -245,12 +246,12 @@ public void testBuildBuiltinDatabaseMissing() throws Exception { public void testBuildFields() throws Exception { GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseNodeService); - Set properties = EnumSet.noneOf(GeoIpProcessor.Property.class); + Set properties = new HashSet<>(); List fieldNames = new ArrayList<>(); int counter = 0; - int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Property.values().length); - for (GeoIpProcessor.Property property : GeoIpProcessor.Property.ALL_CITY_PROPERTIES) { + int numFields = scaledRandomIntBetween(1, Property.values().length); + for (Property property : Property.ALL_CITY_PROPERTIES) { properties.add(property); fieldNames.add(property.name().toLowerCase(Locale.ROOT)); if (++counter >= numFields) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f5ad0e9c0817a..3114d24ee7571 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -14,15 +14,16 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.InputStream; import java.util.Collections; -import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; @@ -35,6 +36,8 @@ public class GeoIpProcessorTests extends ESTestCase { + private static final Set ALL_PROPERTIES = Set.of(Property.values()); + public void testCity() throws Exception { GeoIpProcessor processor = new GeoIpProcessor( randomAlphaOfLength(10), @@ -43,7 +46,7 @@ public void testCity() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -77,7 +80,7 @@ public void testNullValueWithIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "filename" @@ -99,7 +102,7 @@ public void testNonExistentWithIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "filename" @@ -118,7 +121,7 @@ public void testNullWithoutIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -140,7 +143,7 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - 
EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -159,7 +162,7 @@ public void testCity_withIpV6() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -197,7 +200,7 @@ public void testCityWithMissingLocation() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -223,7 +226,7 @@ public void testCountry() throws Exception { loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -252,7 +255,7 @@ public void testCountryWithMissingLocation() throws Exception { loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -279,7 +282,7 @@ public void testAsn() throws Exception { loader("/GeoLite2-ASN.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -308,7 +311,7 @@ public void testAddressIsNotInTheDatabase() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -332,7 +335,7 @@ public void testInvalid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -353,7 +356,7 @@ public void testListAllValid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -383,7 +386,7 @@ public void testListPartiallyValid() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, 
"target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -413,7 +416,7 @@ public void testListNoMatches() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "filename" @@ -433,7 +436,7 @@ public void testListDatabaseReferenceCounting() throws Exception { GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", () -> { loader.preLookup(); return loader; - }, () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename"); + }, () -> true, "target_field", ALL_PROPERTIES, false, false, "filename"); Map document = new HashMap<>(); document.put("source_field", List.of("8.8.8.8", "82.171.64.0")); @@ -464,7 +467,7 @@ public void testListFirstOnly() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -492,7 +495,7 @@ public void testListFirstOnlyNoMatches() throws Exception { loader("/GeoLite2-City.mmdb"), () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -514,7 +517,7 @@ public void testInvalidDatabase() throws Exception { loader("/GeoLite2-City.mmdb"), () -> false, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, true, "filename" @@ -537,7 +540,7 @@ public void testNoDatabase() throws Exception { () -> null, () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, false, false, "GeoLite2-City" @@ -560,7 +563,7 @@ public void testNoDatabase_ignoreMissing() throws Exception { () -> null, () -> true, "target_field", - EnumSet.allOf(GeoIpProcessor.Property.class), + ALL_PROPERTIES, true, false, "GeoLite2-City" From c6a0d4f0d775f7cee7a0bce8ec878428b42fa768 Mon Sep 17 00:00:00 2001 From: 
Keith Massey Date: Fri, 29 Mar 2024 16:29:16 -0500 Subject: [PATCH 30/69] Pulling KeyValueProcessor.logAndBuildException() into AbstractProcessor (#106931) --- .../ingest/common/KeyValueProcessor.java | 33 +---- .../ingest/AbstractProcessor.java | 36 +++++ .../ingest/AbstractProcessorTests.java | 134 ++++++++++++++++++ 3 files changed, 173 insertions(+), 30 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 0c6e37f675e1d..a7b1efb52efc5 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -8,9 +8,6 @@ package org.elasticsearch.ingest.common; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.core.Predicates; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; @@ -32,8 +29,6 @@ */ public final class KeyValueProcessor extends AbstractProcessor { - private static final Logger logger = LogManager.getLogger(KeyValueProcessor.class); - public static final String TYPE = "kv"; private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)"); @@ -85,7 +80,7 @@ public final class KeyValueProcessor extends AbstractProcessor { ); } - private static Consumer buildExecution( + private Consumer buildExecution( String fieldSplit, String valueSplit, TemplateScript.Factory field, @@ -170,29 +165,7 @@ private static Consumer buildExecution( }; } - /** - * Helper method for buildTrimmer and buildSplitter. - *

- * If trace logging is enabled, then we should log the stacktrace (and so the message can be slightly simpler). - * On the other hand if trace logging isn't enabled, then we'll need to log some context on the original issue (but not a stacktrace). - *

- * Regardless of the logging level, we should throw an exception that has the context in its message, which this method builds. - */ - private static ElasticsearchException logAndBuildException(String message, Throwable error) { - String cause = error.getClass().getName(); - if (error.getMessage() != null) { - cause += ": " + error.getMessage(); - } - String longMessage = message + ": " + cause; - if (logger.isTraceEnabled()) { - logger.trace(message, error); - } else { - logger.warn(longMessage); - } - return new ElasticsearchException(longMessage); - } - - private static Function buildTrimmer(String trim) { + private Function buildTrimmer(String trim) { if (trim == null) { return val -> val; } else { @@ -207,7 +180,7 @@ private static Function buildTrimmer(String trim) { } } - private static Function buildSplitter(String split, boolean fields) { + private Function buildSplitter(String split, boolean fields) { int limit = fields ? 0 : 2; if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') { Pattern splitPattern = Pattern.compile(split); diff --git a/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java index d709e442cac1b..6fb47c57d8811 100644 --- a/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/AbstractProcessor.java @@ -8,6 +8,10 @@ package org.elasticsearch.ingest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; + /** * An Abstract Processor that holds tag and description information * about the processor. @@ -30,4 +34,36 @@ public String getTag() { public String getDescription() { return description; } + + /** + * Helper method to be used by processors that need to catch and log Throwables. + *

+ * If trace logging is enabled, then we log the provided message and the full stacktrace + * On the other hand if trace logging isn't enabled, then we log the provided message and the message from the Throwable (but not a + * stacktrace). + *

+ * Regardless of the logging level, we throw an ElasticsearchException that has the context in its message + * + * @param message A message to be logged and to be included in the message of the returned ElasticsearchException + * @param throwable The Throwable that has been caught + * @return A new ElasticsearchException whose message includes the passed-in message and the message from the passed-in Throwable. It + * will not however wrap the given Throwable. + */ + protected ElasticsearchException logAndBuildException(String message, Throwable throwable) { + String cause = throwable.getClass().getName(); + if (throwable.getMessage() != null) { + cause += ": " + throwable.getMessage(); + } + String longMessage = message + ": " + cause; + // This method will only be called in exceptional situations, so the cost of looking up the logger won't be bad: + Logger logger = LogManager.getLogger(getClass()); + if (logger.isTraceEnabled()) { + logger.trace(message, throwable); + } else { + logger.warn(longMessage); + } + // We don't want to wrap the Throwable here because it is probably not one of the exceptions that ElasticsearchException can + // serialize: + return new ElasticsearchException(longMessage); + } } diff --git a/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java new file mode 100644 index 0000000000000..121f6cc5f5dc6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/ingest/AbstractProcessorTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.ingest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.spi.LoggerContextFactory; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AbstractProcessorTests extends ESTestCase { + + public void testLogAndBuildException() { + final LoggerContextFactory originalFactory = LogManager.getFactory(); + try { + final String message = randomAlphaOfLength(100); + final String throwableMessage = randomBoolean() ? 
null : randomAlphaOfLength(100); + AtomicBoolean warnCalled = new AtomicBoolean(false); + AtomicBoolean traceCalled = new AtomicBoolean(false); + final Throwable throwable = randomFrom( + new StackOverflowError(throwableMessage), + new RuntimeException(throwableMessage), + new IOException(throwableMessage) + ); + + { + // Mock logging so that we can make sure we're logging what we expect: + Logger mockLogger = mock(Logger.class); + doAnswer(invocationOnMock -> { + warnCalled.set(true); + String logMessage = invocationOnMock.getArgument(0, String.class); + assertThat(logMessage, containsString(message)); + if (throwableMessage != null) { + assertThat(logMessage, containsString(throwableMessage)); + } + return null; + }).when(mockLogger).warn(anyString()); + + doAnswer(invocationOnMock -> { + traceCalled.set(true); + String logMessage = invocationOnMock.getArgument(0, String.class); + Throwable logThrowable = invocationOnMock.getArgument(1, Throwable.class); + assertThat(logMessage, containsString(message)); + if (throwableMessage != null) { + assertThat(logMessage, not(containsString(throwableMessage))); + } + assertThat(logThrowable, equalTo(throwable)); + return null; + }).when(mockLogger).trace(anyString(), any(Throwable.class)); + + final LoggerContext context = Mockito.mock(LoggerContext.class); + when(context.getLogger(TestProcessor.class)).thenReturn(mockLogger); + + final LoggerContextFactory spy = Mockito.spy(originalFactory); + Mockito.doReturn(context).when(spy).getContext(any(), any(), any(), anyBoolean()); + LogManager.setFactory(spy); + } + + TestProcessor testProcessor = new TestProcessor(); + + { + // Run with trace logging disabled + ElasticsearchException resultException = testProcessor.logAndBuildException(message, throwable); + assertThat(resultException.getRootCause(), equalTo(resultException)); + String resultMessage = resultException.getMessage(); + assertNotNull(resultMessage); + if (throwableMessage != null) { + assertThat(resultMessage, 
containsString(throwableMessage)); + } + assertThat(resultMessage, containsString(message)); + + assertThat("log.warn not called", warnCalled.get(), is(true)); + assertThat("log.trace called", traceCalled.get(), is(false)); + } + + // reset between tests: + warnCalled.set(false); + traceCalled.set(false); + + { + // Now enable trace logging + when(LogManager.getLogger(TestProcessor.class).isTraceEnabled()).thenReturn(true); + ElasticsearchException resultException = testProcessor.logAndBuildException(message, throwable); + assertThat(resultException.getRootCause(), equalTo(resultException)); + String resultMessage = resultException.getMessage(); + assertNotNull(resultMessage); + if (throwableMessage != null) { + assertThat(resultMessage, containsString(throwableMessage)); + } + assertThat(resultMessage, containsString(message)); + + assertThat("log.warn called", warnCalled.get(), is(false)); + assertThat("log.trace not called", traceCalled.get(), is(true)); + } + } finally { + LogManager.setFactory(originalFactory); + } + } + + class TestProcessor extends AbstractProcessor { + + protected TestProcessor() { + super("", ""); + + } + + @Override + public String getType() { + return "test"; + } + } +} From ee2733fa4f5f319c534dba80aa615ae8735ec4b2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 Mar 2024 14:57:02 -0700 Subject: [PATCH 31/69] Fix MRJAR test tasks when runtime java is set (#106929) The MRJAR test tasks explicitly set the java toolchain to use for execution. This is incompatible with setting the test executable directly, which is what happens when runtime java is set. This commit guards setting the toolchain to only occur when runtime java isn't set, and adds an onlyIf to ensure the test is runnable when runtime java is set. 
relates #106828 --- .../gradle/internal/MrjarPlugin.java | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 9e2f44323f914..6524247c4c8f6 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -8,8 +8,10 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask; import org.elasticsearch.gradle.util.GradleUtils; +import org.gradle.api.JavaVersion; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.file.FileCollection; @@ -151,8 +153,18 @@ private void createTestTask(Project project, SourceSet sourceSet, int javaVersio testTask.setClasspath(testRuntime.plus(project.files(jarTask))); testTask.setTestClassesDirs(sourceSet.getOutput().getClassesDirs()); - testTask.getJavaLauncher() - .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + // only set the jdk if runtime java isn't set because setting the toolchain is incompatible with + // runtime java setting the executable directly + if (BuildParams.getIsRuntimeJavaHomeSet()) { + testTask.onlyIf("runtime java must support java " + javaVersion, t -> { + JavaVersion runtimeJavaVersion = BuildParams.getRuntimeJavaVersion(); + return runtimeJavaVersion.isCompatibleWith(JavaVersion.toVersion(javaVersion)); + }); + } else { + testTask.getJavaLauncher() + .set(javaToolchains.launcherFor(spec -> spec.getLanguageVersion().set(JavaLanguageVersion.of(javaVersion)))); + } + }); project.getTasks().named("check").configure(checkTask -> checkTask.dependsOn(testTaskProvider)); From 
d125f528136c0c4432046ba9cd56df099e77c365 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 29 Mar 2024 18:35:52 -0700 Subject: [PATCH 32/69] AwaitsFix #106933 --- .../java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 184343349d317..695f5d2a64bc7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -11,6 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.hamcrest.Matchers; @@ -20,6 +21,7 @@ import static org.hamcrest.CoreMatchers.equalTo; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106933") public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { public HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { From 7af3c8db016bc97d45cabbcd1dd7d2ec30cc166e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Sat, 30 Mar 2024 11:13:05 -0700 Subject: [PATCH 33/69] AwaitsFix #106871 --- .../org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 8c06ccf750d36..1f4830d8b6d0c 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ 
b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -176,6 +176,7 @@ public void testWithUsers() throws Exception { * the testWithUsers test is generally testing). * @throws IOException */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106871") public void testStatusWithUsersWhileSearchIsRunning() throws IOException { String user = randomFrom("user1", "user2"); String other = user.equals("user1") ? "user2" : "user1"; From 7f83189cb24bf41ddea13126385c47302ea9f9b3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sat, 30 Mar 2024 19:23:27 +0000 Subject: [PATCH 34/69] Remove executor lookups from `TransportWriteAction` (#106938) Replaces the `String` names (plus associated threadpool) with proper `Executor` instances. Relates #106279 (removes a couple more usages of `SAME`) Relates #105460, #99787, #97879 etc. --- .../action/bulk/TransportShardBulkAction.java | 15 +++++-------- .../TransportResyncReplicationAction.java | 2 +- .../replication/TransportWriteAction.java | 11 +++++----- .../index/seqno/RetentionLeaseSyncAction.java | 21 ++++++++++++++++++- .../indices/ExecutorSelector.java | 13 ++++++------ .../bulk/TransportShardBulkActionTests.java | 18 ++++++---------- .../TransportWriteActionTests.java | 5 +++-- .../ESIndexLevelReplicationTestCase.java | 3 +-- .../TransportBulkShardOperationsAction.java | 2 +- .../authz/AuthorizationServiceTests.java | 3 +-- 10 files changed, 51 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 5c1f0e4aa7306..4cf10b3c27824 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -115,7 +115,7 @@ public TransportShardBulkAction( actionFilters, 
BulkShardRequest::new, BulkShardRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), false, indexingPressure, systemIndices @@ -167,7 +167,7 @@ public void onClusterServiceClose() { public void onTimeout(TimeValue timeout) { mappingUpdateListener.onFailure(new MapperException("timed out while waiting for a dynamic mapping update")); } - }), listener, threadPool, executor(primary), postWriteRefresh, postWriteAction, documentParsingProvider); + }), listener, executor(primary), postWriteRefresh, postWriteAction, documentParsingProvider); } @Override @@ -188,8 +188,7 @@ public static void performOnPrimary( MappingUpdatePerformer mappingUpdater, Consumer> waitForMappingUpdate, ActionListener> listener, - ThreadPool threadPool, - String executorName + Executor executor ) { performOnPrimary( request, @@ -199,8 +198,7 @@ public static void performOnPrimary( mappingUpdater, waitForMappingUpdate, listener, - threadPool, - executorName, + executor, null, null, DocumentParsingProvider.EMPTY_INSTANCE @@ -215,16 +213,13 @@ public static void performOnPrimary( MappingUpdatePerformer mappingUpdater, Consumer> waitForMappingUpdate, ActionListener> listener, - ThreadPool threadPool, - String executorName, + Executor executor, @Nullable PostWriteRefresh postWriteRefresh, @Nullable Consumer postWriteAction, DocumentParsingProvider documentParsingProvider ) { new ActionRunnable<>(listener) { - private final Executor executor = threadPool.executor(executorName); - private final BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(request, primary); final long startBulkTime = System.nanoTime(); diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 9a08da40ca282..4684c990299f9 100644 --- 
a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -70,7 +70,7 @@ public TransportResyncReplicationAction( actionFilters, ResyncReplicationRequest::new, ResyncReplicationRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), true, /* we should never reject resync because of thread pool capacity on primary */ indexingPressure, systemIndices diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java index ea24d7deb9aa7..8994b428adcbe 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -61,7 +62,7 @@ public abstract class TransportWriteAction< protected final ExecutorSelector executorSelector; protected final PostWriteRefresh postWriteRefresh; - private final BiFunction executorFunction; + private final BiFunction executorFunction; protected TransportWriteAction( Settings settings, @@ -74,7 +75,7 @@ protected TransportWriteAction( ActionFilters actionFilters, Writeable.Reader request, Writeable.Reader replicaRequest, - BiFunction executorFunction, + BiFunction executorFunction, boolean forceExecutionOnPrimary, IndexingPressure indexingPressure, SystemIndices systemIndices @@ -103,7 +104,7 @@ protected TransportWriteAction( this.postWriteRefresh = new 
PostWriteRefresh(transportService); } - protected String executor(IndexShard shard) { + protected Executor executor(IndexShard shard) { return executorFunction.apply(executorSelector, shard); } @@ -210,7 +211,7 @@ protected void shardOperationOnPrimary( IndexShard primary, ActionListener> listener ) { - threadPool.executor(executorFunction.apply(executorSelector, primary)).execute(new ActionRunnable<>(listener) { + executorFunction.apply(executorSelector, primary).execute(new ActionRunnable<>(listener) { @Override protected void doRun() { dispatchedShardOperationOnPrimary(request, primary, listener); @@ -238,7 +239,7 @@ protected abstract void dispatchedShardOperationOnPrimary( */ @Override protected void shardOperationOnReplica(ReplicaRequest request, IndexShard replica, ActionListener listener) { - threadPool.executor(executorFunction.apply(executorSelector, replica)).execute(new ActionRunnable<>(listener) { + executorFunction.apply(executorSelector, replica).execute(new ActionRunnable<>(listener) { @Override protected void doRun() { dispatchedShardOperationOnReplica(request, replica, listener); diff --git a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java index d03a29922da07..d69dbc00ff7e6 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/RetentionLeaseSyncAction.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.shard.IndexShardClosedException; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotInPrimaryModeException; +import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.node.NodeClosedException; @@ -50,6 +51,7 @@ import java.util.Map; import java.util.Objects; import 
java.util.concurrent.Executor; +import java.util.function.BiFunction; import static org.elasticsearch.core.Strings.format; @@ -88,7 +90,7 @@ public RetentionLeaseSyncAction( actionFilters, RetentionLeaseSyncAction.Request::new, RetentionLeaseSyncAction.Request::new, - (service, ignore) -> ThreadPool.Names.MANAGEMENT, + new ManagementOnlyExecutorFunction(threadPool), false, indexingPressure, systemIndices @@ -263,4 +265,21 @@ protected Response newResponseInstance(StreamInput in) throws IOException { return new Response(in); } + /** + * A {@code BiFunction} for passing to the super constructor which always returns the + * MANAGEMENT executor (but looks it up once at construction time and caches the result, unlike how the obvious lambda would work). + */ + private static class ManagementOnlyExecutorFunction implements BiFunction { + private final Executor executor; + + ManagementOnlyExecutorFunction(ThreadPool threadPool) { + executor = threadPool.executor(ThreadPool.Names.MANAGEMENT); + } + + @Override + public Executor apply(ExecutorSelector executorSelector, IndexShard indexShard) { + return executor; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java b/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java index a92a451e58eec..d5969702ba402 100644 --- a/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java +++ b/server/src/main/java/org/elasticsearch/indices/ExecutorSelector.java @@ -12,6 +12,8 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Objects; +import java.util.concurrent.Executor; +import java.util.function.BiFunction; /** * Some operations need to use different executors for different index patterns. @@ -93,12 +95,11 @@ public String executorForWrite(String indexName) { /** * This is a convenience method for the case when we need to find an executor for a shard. - * Note that it can be passed to methods as a {@link java.util.function.BiFunction}. 
- * @param executorSelector An executor selector service. - * @param shard A shard for which we need to find an executor. - * @return Name of the executor that should be used for write operations on this shard. + * @return a {@link java.util.function.BiFunction} which returns the executor that should be used for write operations on this shard. */ - public static String getWriteExecutorForShard(ExecutorSelector executorSelector, IndexShard shard) { - return executorSelector.executorForWrite(shard.shardId().getIndexName()); + public static BiFunction getWriteExecutorForShard(ThreadPool threadPool) { + return (executorSelector, indexShard) -> threadPool.executor( + executorSelector.executorForWrite(indexShard.shardId().getIndexName()) + ); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 4660e1e4ea97d..31e1a66c8ca44 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -241,8 +241,7 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { throw new AssertionError(e); } }), latch::countDown), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); @@ -936,8 +935,7 @@ public void testRetries() throws Exception { assertThat(response.status(), equalTo(RestStatus.CREATED)); assertThat(response.getSeqNo(), equalTo(13L)); }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); } @@ -1026,8 +1024,7 @@ public void testForceExecutionOnRejectionAfterMappingUpdate() throws Exception { new LatchedActionListener<>(ActionTestUtils.assertNoFailureListener(result -> // Assert that we still need to fsync the location that was successfully written assertThat(((WritePrimaryResult) result).location, equalTo(resultLocation1))), 
latch), - rejectingThreadPool, - Names.WRITE + rejectingThreadPool.executor(Names.WRITE) ); latch.await(); @@ -1098,8 +1095,7 @@ public void testPerformOnPrimaryReportsBulkStats() throws Exception { closeShards(shard); } }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); @@ -1148,8 +1144,7 @@ public void testNoopMappingUpdateInfiniteLoopPrevention() throws Exception { (update, shardId, listener) -> fail("the master should not be contacted as the operation yielded a noop mapping update"), listener -> listener.onResponse(null), ActionTestUtils.assertNoFailureListener(result -> {}), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ) ); assertThat( @@ -1219,8 +1214,7 @@ public void testNoopMappingUpdateSuccessOnRetry() throws Exception { BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse(); assertFalse(primaryResponse.isFailed()); }), latch), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); latch.await(); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index f4c8e2baa94cf..5530ec61fea33 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Strings; import org.elasticsearch.index.Index; @@ -430,7 +431,7 @@ protected TestAction(boolean withDocumentFailureOnPrimary, boolean withDocumentF new ActionFilters(new HashSet<>()), TestRequest::new, 
TestRequest::new, - (service, ignore) -> ThreadPool.Names.SAME, + (service, ignore) -> EsExecutors.DIRECT_EXECUTOR_SERVICE, false, new IndexingPressure(Settings.EMPTY), EmptySystemIndices.INSTANCE @@ -458,7 +459,7 @@ protected TestAction( new ActionFilters(new HashSet<>()), TestRequest::new, TestRequest::new, - (service, ignore) -> ThreadPool.Names.SAME, + (service, ignore) -> EsExecutors.DIRECT_EXECUTOR_SERVICE, false, new IndexingPressure(settings), EmptySystemIndices.INSTANCE diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index ba6d7e441ef4a..6157d6997641d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -895,8 +895,7 @@ private void executeShardBulkOnPrimary( ); listener.onResponse((TransportWriteAction.WritePrimaryResult) result); }), - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java index 3e2f5710c1507..d9592c3df4950 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/TransportBulkShardOperationsAction.java @@ -66,7 +66,7 @@ public TransportBulkShardOperationsAction( actionFilters, BulkShardOperationsRequest::new, BulkShardOperationsRequest::new, - ExecutorSelector::getWriteExecutorForShard, + ExecutorSelector.getWriteExecutorForShard(threadPool), false, indexingPressure, 
systemIndices diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 2cc6c7d569f44..4330dc3171047 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -1583,8 +1583,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { mappingUpdater, waitForMappingUpdate, future, - threadPool, - Names.WRITE + threadPool.executor(Names.WRITE) ); TransportReplicationAction.PrimaryResult result = future.get(); From ddd7c5352ec4cfaf07748a540f6e249e89a982a1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sat, 30 Mar 2024 19:33:22 +0000 Subject: [PATCH 35/69] Remove executor lookups from `TransportInstanceSingleOperationAction` (#106940) Replaces the `String` names (plus associated threadpool) with proper `Executor` instances. Relates #106279 (removes another usage of `SAME`) Relates #106938, #105460, #99787, #97879 etc. 
--- .../TransportInstanceSingleOperationAction.java | 8 +++++--- .../action/update/TransportUpdateAction.java | 10 +++++----- .../TransportInstanceSingleOperationActionTests.java | 6 ++++-- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index 80b7a95bbe0de..fb62f0a2b1d61 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -43,6 +43,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.concurrent.Executor; import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY; @@ -81,7 +82,7 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncSingleAction(request, listener).start(); } - protected abstract String executor(ShardId shardId); + protected abstract Executor executor(ShardId shardId); protected abstract void shardOperation(Request request, ActionListener listener); @@ -259,7 +260,8 @@ public void onTimeout(TimeValue timeout) { } private void handleShardRequest(Request request, TransportChannel channel, Task task) { - threadPool.executor(executor(request.shardId)) - .execute(ActionRunnable.wrap(new ChannelActionListener(channel), l -> shardOperation(request, l))); + executor(request.shardId).execute( + ActionRunnable.wrap(new ChannelActionListener(channel), l -> shardOperation(request, l)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 63ae56bfbd047..b899d68107975 
100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -51,7 +51,7 @@ import java.io.IOException; import java.util.Map; -import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executor; import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.action.bulk.TransportBulkAction.unwrappingSingleItemBulkResponse; @@ -88,9 +88,9 @@ public TransportUpdateAction( } @Override - protected String executor(ShardId shardId) { + protected Executor executor(ShardId shardId) { final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); - return indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE; + return threadPool.executor(indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE); } @Override @@ -321,9 +321,9 @@ private void handleUpdateFailureWithRetry( request.id() ); - final ExecutorService executor; + final Executor executor; try { - executor = threadPool.executor(executor(request.getShardId())); + executor = executor(request.getShardId()); } catch (Exception e) { // might fail if shard no longer exists locally, in which case we cannot retry e.addSuppressed(versionConflictEngineException); diff --git a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 4c29b10575fae..b071e26a1fecd 100644 --- a/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -29,6 +29,7 @@ import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; @@ -52,6 +53,7 @@ import java.util.HashSet; import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -114,8 +116,8 @@ public Map getResults() { } @Override - protected String executor(ShardId shardId) { - return ThreadPool.Names.SAME; + protected Executor executor(ShardId shardId) { + return EsExecutors.DIRECT_EXECUTOR_SERVICE; } @Override From b676d380503e3336160927a35ee19d1f4efe96e4 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Sun, 31 Mar 2024 00:17:20 +0100 Subject: [PATCH 36/69] Update mixed cluster test skip version for downsampling (#106942) All nodes on the mixed cluster need to be at least on version 8.10 since PR #97557 introduced execution of downsampling tasks using the persistent task framework which is incompatible with how execution was coordinated before.
--- x-pack/plugin/downsample/qa/mixed-cluster/build.gradle | 2 +- .../resources/rest-api-spec/test/downsample/10_basic.yml | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle index 2449991a8e1e0..61aa2927e46de 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle +++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle @@ -26,7 +26,7 @@ restResources { } def supportedVersion = bwcVersion -> { - return bwcVersion.onOrAfter("8.8.0"); + return bwcVersion.onOrAfter("8.10.0"); } BuildParams.bwcVersions.withWireCompatible(supportedVersion) { bwcVersion, baseName -> diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 95c69efa5b36d..2362f21d77d86 100644 --- a/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/mixed-cluster/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 8.4.99" - reason: "rollup renamed to downsample in 8.5.0" + version: " - 8.9.99" + reason: "Downsampling executed using persistent task framework from version 8.10" - do: indices.create: @@ -87,6 +87,9 @@ setup: --- "Downsample index": + - skip: + version: " - 8.9.99" + reason: "Downsampling executed using persistent task framework from version 8.10" - do: indices.downsample: From 60d5083b5f1c632f01ddc0fc7271eebd52666f6e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sun, 31 Mar 2024 21:49:30 +0200 Subject: [PATCH 37/69] Remove some more ActionType subclasses (#106947) Cleaning up a couple more of these from the server module. 
--- .../datastreams/TSDBIndexingIT.java | 6 +- .../lifecycle/DataStreamLifecycleService.java | 4 +- .../system/indices/SystemIndicesQA.java | 2 +- .../action/IndicesRequestIT.java | 24 +- .../admin/cluster/node/tasks/TasksIT.java | 28 +-- .../diskusage/IndexDiskUsageAnalyzerIT.java | 41 ++-- .../indices/recovery/IndexRecoveryIT.java | 4 +- .../elasticsearch/action/ActionModule.java | 24 +- .../node/tasks/cancel/CancelTasksAction.java | 25 --- .../cancel/CancelTasksRequestBuilder.java | 2 +- .../cancel/TransportCancelTasksAction.java | 7 +- .../cluster/node/tasks/get/GetTaskAction.java | 25 --- .../node/tasks/get/GetTaskRequestBuilder.java | 2 +- .../tasks/get/TransportGetTaskAction.java | 8 +- .../cleanup/CleanupRepositoryAction.java | 20 -- .../CleanupRepositoryRequestBuilder.java | 2 +- .../TransportCleanupRepositoryAction.java | 4 +- .../cache/clear/ClearIndicesCacheAction.java | 22 -- .../ClearIndicesCacheRequestBuilder.java | 2 +- .../TransportClearIndicesCacheAction.java | 4 +- .../indices/create/CreateIndexAction.java | 22 -- .../create/CreateIndexRequestBuilder.java | 4 +- .../create/TransportCreateIndexAction.java | 4 +- .../AnalyzeIndexDiskUsageAction.java | 20 -- .../AnalyzeIndexDiskUsageRequest.java | 2 +- .../TransportAnalyzeIndexDiskUsageAction.java | 4 +- .../indices/readonly/AddIndexBlockAction.java | 21 -- .../readonly/AddIndexBlockRequestBuilder.java | 2 +- .../TransportAddIndexBlockAction.java | 4 +- .../TransportVerifyShardIndexBlockAction.java | 5 +- .../elasticsearch/action/bulk/BulkAction.java | 22 -- .../action/bulk/BulkRequestBuilder.java | 2 +- .../action/bulk/TransportBulkAction.java | 4 +- .../action/bulk/TransportShardBulkAction.java | 2 +- .../action/search/SearchTransportService.java | 4 +- .../internal/support/AbstractClient.java | 38 ++-- .../action/RestCancellableNodeClient.java | 2 +- .../RestAnalyzeIndexDiskUsageAction.java | 4 +- .../tasks/TaskResultsService.java | 2 +- .../node/tasks/CancellableTasksTests.java | 4 +- 
.../cluster/node/tasks/TestTaskPlugin.java | 2 +- .../bulk/TransportBulkActionIngestTests.java | 8 +- .../AbstractClientHeadersTestCase.java | 17 +- .../RestCancellableNodeClientTests.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- .../xpack/core/ilm/ReadOnlyStep.java | 4 +- .../privilege/ClusterPrivilegeResolver.java | 7 +- .../authz/privilege/IndexPrivilege.java | 4 +- .../core/security/user/InternalUsers.java | 6 +- .../notifications/AbstractAuditorTests.java | 4 +- .../authz/permission/LimitedRoleTests.java | 28 +-- .../authz/privilege/PrivilegeTests.java | 6 +- .../authz/store/ReservedRolesStoreTests.java | 209 +++++++++++++----- .../security/user/InternalUsersTests.java | 38 ++-- .../xpack/enrich/BasicEnrichTests.java | 4 +- .../enrich/EnrichPolicyExecutorTests.java | 4 +- .../xpack/enrich/EnrichPolicyRunnerTests.java | 4 +- .../action/CrossClustersCancellationIT.java | 4 +- .../xpack/esql/action/EsqlActionTaskIT.java | 6 +- .../ilm/history/ILMHistoryStoreTests.java | 12 +- .../integration/ModelSnapshotRetentionIT.java | 4 +- .../ml/integration/ModelSnapshotSearchIT.java | 4 +- .../ml/integration/AnnotationIndexIT.java | 4 +- .../action/TransportDeleteFilterAction.java | 36 +-- .../ml/action/TransportDeleteJobAction.java | 4 +- .../TransportPostCalendarEventsAction.java | 4 +- .../ml/action/TransportResetJobAction.java | 4 +- .../TransportRevertModelSnapshotAction.java | 4 +- .../TransportUpdateModelSnapshotAction.java | 26 ++- .../xpack/ml/dataframe/DestinationIndex.java | 4 +- .../persistence/TrainedModelProvider.java | 4 +- .../job/persistence/JobResultsProvider.java | 4 +- .../persistence/ResultsPersisterService.java | 4 +- ...ransportDeleteTrainedModelActionTests.java | 8 +- .../annotations/AnnotationPersisterTests.java | 22 +- .../xpack/ml/datafeed/DatafeedJobTests.java | 8 +- .../ml/dataframe/DestinationIndexTests.java | 4 +- .../TrainedModelProviderTests.java | 4 +- .../persistence/JobResultsPersisterTests.java | 18 +- 
.../ResultsPersisterServiceTests.java | 22 +- .../profiling/ProfilingIndexManagerTests.java | 8 +- .../action/TransportPutRollupJobAction.java | 4 +- .../xpack/rollup/job/RollupJobTask.java | 4 +- .../action/PutJobStateMachineTests.java | 10 +- .../BlobStoreCacheMaintenanceService.java | 4 +- .../authc/apikey/ApiKeySingleNodeTests.java | 4 +- .../security/authz/IndexAliasesTests.java | 4 +- .../security/authz/WriteActionsTests.java | 20 +- .../xpack/security/authc/ApiKeyService.java | 3 +- .../IndexServiceAccountTokenStore.java | 3 +- .../security/authz/AuthorizationUtils.java | 2 +- .../xpack/security/authz/RBACEngine.java | 4 +- .../security/profile/ProfileService.java | 3 +- ...sportSamlInvalidateSessionActionTests.java | 4 +- .../security/authc/ApiKeyServiceTests.java | 6 +- .../service/ElasticServiceAccountsTests.java | 32 +-- .../authz/AuthorizationServiceTests.java | 39 ++-- .../authz/AuthorizationUtilsTests.java | 2 +- .../authz/store/CompositeRolesStoreTests.java | 5 +- .../security/profile/ProfileServiceTests.java | 4 +- .../history/SnapshotHistoryStoreTests.java | 4 +- .../xpack/spatial/SpatialDiskUsageIT.java | 4 +- .../transform/persistence/TransformIndex.java | 4 +- .../transforms/ClientTransformIndexer.java | 4 +- .../persistence/TransformIndexTests.java | 4 +- .../execution/TriggeredWatchStoreTests.java | 4 +- 106 files changed, 569 insertions(+), 620 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java 
delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java delete mode 100644 server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 772cc0f98d757..24c373df72144 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -8,8 +8,8 @@ package org.elasticsearch.datastreams; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; +import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -468,7 +468,7 @@ public void testTrimId() throws Exception { // Pre check whether _id stored field uses diskspace: var diskUsageResponse = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { dataStreamName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); var map = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(diskUsageResponse), false); @@ -510,7 +510,7 @@ public void testTrimId() throws Exception { // Check the _id stored field uses no disk space: 
diskUsageResponse = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { dataStreamName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); map = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(diskUsageResponse), false); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index d1dd008e27977..52753f00a39c1 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -19,9 +19,9 @@ import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -738,7 +738,7 @@ private void addIndexBlockOnce(String indexName) { transportActionsDeduplicator.executeOnce( addIndexBlockRequest, new ErrorRecordingActionListener( - AddIndexBlockAction.NAME, + TransportAddIndexBlockAction.TYPE.name(), indexName, errorStore, Strings.format("Data stream lifecycle service encountered an error trying to 
mark index [%s] as readonly", indexName), diff --git a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java index b86aefb12a956..9fc256e79873e 100644 --- a/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java +++ b/qa/system-indices/src/main/java/org/elasticsearch/system/indices/SystemIndicesQA.java @@ -40,7 +40,7 @@ import java.util.function.Predicate; import java.util.function.Supplier; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index 3b34cedcd3635..920677e8c4b4a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; +import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -42,8 +42,8 
@@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.explain.TransportExplainAction; @@ -206,7 +206,7 @@ public void testAnalyze() { } public void testIndex() { - String[] indexShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] indexShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(indexShardActions); IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias()).id("id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); @@ -217,7 +217,7 @@ public void testIndex() { } public void testDelete() { - String[] deleteShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] deleteShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(deleteShardActions); DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias()).id("id"); @@ -231,8 +231,8 @@ public void testUpdate() { // update action goes to the primary, index op gets executed locally, then replicated String[] updateShardActions = new String[] { TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -249,8 +249,8 @@ public void testUpdateUpsert() { // update action 
goes to the primary, index op gets executed locally, then replicated String[] updateShardActions = new String[] { TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -267,8 +267,8 @@ public void testUpdateDelete() { // update action goes to the primary, delete op gets executed locally, then replicated String[] updateShardActions = new String[] { TransportUpdateAction.NAME + "[s]", - BulkAction.NAME + "[s][p]", - BulkAction.NAME + "[s][r]" }; + TransportBulkAction.NAME + "[s][p]", + TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(updateShardActions); String indexOrAlias = randomIndexOrAlias(); @@ -284,7 +284,7 @@ public void testUpdateDelete() { } public void testBulk() { - String[] bulkShardActions = new String[] { BulkAction.NAME + "[s][p]", BulkAction.NAME + "[s][r]" }; + String[] bulkShardActions = new String[] { TransportBulkAction.NAME + "[s][p]", TransportBulkAction.NAME + "[s][r]" }; interceptTransportActions(bulkShardActions); List indicesOrAliases = new ArrayList<>(); @@ -427,7 +427,7 @@ public void testRefresh() { } public void testClearCache() { - String clearCacheAction = ClearIndicesCacheAction.NAME + "[n]"; + String clearCacheAction = TransportClearIndicesCacheAction.TYPE.name() + "[n]"; interceptTransportActions(clearCacheAction); ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(randomIndicesOrAliases()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 0766b732099c4..8011be1d69a04 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.search.TransportSearchAction; @@ -297,10 +297,10 @@ public void testTransportBroadcastReplicationTasks() { } public void testTransportBulkTasks() { - registerTaskManagerListeners(BulkAction.NAME); // main task - registerTaskManagerListeners(BulkAction.NAME + "[s]"); // shard task - registerTaskManagerListeners(BulkAction.NAME + "[s][p]"); // shard task on primary - registerTaskManagerListeners(BulkAction.NAME + "[s][r]"); // shard task on replica + registerTaskManagerListeners(TransportBulkAction.NAME); // main task + registerTaskManagerListeners(TransportBulkAction.NAME + "[s]"); // shard task + registerTaskManagerListeners(TransportBulkAction.NAME + "[s][p]"); // shard task on primary + registerTaskManagerListeners(TransportBulkAction.NAME + "[s][r]"); // shard task on replica createIndex("test"); ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks // ensures the mapping is available on all nodes so we won't retry the request (in case replicas don't have the right mapping). 
@@ -308,13 +308,13 @@ public void testTransportBulkTasks() { client().prepareBulk().add(prepareIndex("test").setId("test_id").setSource("{\"foo\": \"bar\"}", XContentType.JSON)).get(); // the bulk operation should produce one main task - List topTask = findEvents(BulkAction.NAME, Tuple::v1); + List topTask = findEvents(TransportBulkAction.NAME, Tuple::v1); assertEquals(1, topTask.size()); assertEquals("requests[1], indices[test]", topTask.get(0).description()); // we should also get 1 or 2 [s] operation with main operation as a parent // in case the primary is located on the coordinating node we will have 1 operation, otherwise - 2 - List shardTasks = findEvents(BulkAction.NAME + "[s]", Tuple::v1); + List shardTasks = findEvents(TransportBulkAction.NAME + "[s]", Tuple::v1); assertThat(shardTasks.size(), allOf(lessThanOrEqualTo(2), greaterThanOrEqualTo(1))); // Select the effective shard task @@ -323,30 +323,30 @@ public void testTransportBulkTasks() { // we have only one task - it's going to be the parent task for all [s][p] and [s][r] tasks shardTask = shardTasks.get(0); // and it should have the main task as a parent - assertParentTask(shardTask, findEvents(BulkAction.NAME, Tuple::v1).get(0)); + assertParentTask(shardTask, findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } else { if (shardTasks.get(0).parentTaskId().equals(shardTasks.get(1).taskId())) { // task 1 is the parent of task 0, that means that task 0 will control [s][p] and [s][r] tasks shardTask = shardTasks.get(0); // in turn the parent of the task 1 should be the main task - assertParentTask(shardTasks.get(1), findEvents(BulkAction.NAME, Tuple::v1).get(0)); + assertParentTask(shardTasks.get(1), findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } else { // otherwise task 1 will control [s][p] and [s][r] tasks shardTask = shardTasks.get(1); // in turn the parent of the task 0 should be the main task - assertParentTask(shardTasks.get(0), findEvents(BulkAction.NAME, Tuple::v1).get(0)); + 
assertParentTask(shardTasks.get(0), findEvents(TransportBulkAction.NAME, Tuple::v1).get(0)); } } assertThat(shardTask.description(), startsWith("requests[1], index[test][")); // we should also get one [s][p] operation with shard operation as a parent - assertEquals(1, numberOfEvents(BulkAction.NAME + "[s][p]", Tuple::v1)); - assertParentTask(findEvents(BulkAction.NAME + "[s][p]", Tuple::v1), shardTask); + assertEquals(1, numberOfEvents(TransportBulkAction.NAME + "[s][p]", Tuple::v1)); + assertParentTask(findEvents(TransportBulkAction.NAME + "[s][p]", Tuple::v1), shardTask); // we should get as many [s][r] operations as we have replica shards // they all should have the same shard task as a parent - assertEquals(getNumShards("test").numReplicas, numberOfEvents(BulkAction.NAME + "[s][r]", Tuple::v1)); - assertParentTask(findEvents(BulkAction.NAME + "[s][r]", Tuple::v1), shardTask); + assertEquals(getNumShards("test").numReplicas, numberOfEvents(TransportBulkAction.NAME + "[s][r]", Tuple::v1)); + assertParentTask(findEvents(TransportBulkAction.NAME + "[s][r]", Tuple::v1), shardTask); } public void testSearchTaskDescriptions() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java index a0d437d8baa73..235d1592cf7c7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java @@ -129,7 +129,7 @@ public void testSimple() throws Exception { client().admin().indices().prepareForceMerge(index).setMaxNumSegments(1).get(); PlainActionFuture future = new PlainActionFuture<>(); client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new 
AnalyzeIndexDiskUsageRequest(new String[] { index }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true), future ); @@ -176,7 +176,7 @@ public void testFailOnFlush() throws Exception { ); failOnFlushShards.addAll(failedShards); AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { indexName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(resp.getTotalShards(), equalTo(numberOfShards)); @@ -208,7 +208,7 @@ public void testManyShards() throws Exception { } AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { "index_*" }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(Arrays.toString(resp.getShardFailures()), resp.getShardFailures(), emptyArray()); @@ -249,25 +249,28 @@ public void testFailingTargetShards() throws Exception { try { for (String node : internalCluster().getNodeNames()) { MockTransportService.getInstance(node) - .addRequestHandlingBehavior(AnalyzeIndexDiskUsageAction.NAME + "[s]", (handler, request, channel, task) -> { - AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); - logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); - ShardId shardId = shardRequest.shardId(); - if (failingShards.contains(shardId)) { - IndexShard indexShard = indicesService.getShardOrNull(shardId); - assertNotNull("No shard found for shard " + shardId, indexShard); - logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); - indexShard.close("test", randomBoolean()); - failedShards.incrementAndGet(); - } else { - successfulShards.incrementAndGet(); + 
.addRequestHandlingBehavior( + TransportAnalyzeIndexDiskUsageAction.TYPE.name() + "[s]", + (handler, request, channel, task) -> { + AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); + ShardId shardId = shardRequest.shardId(); + if (failingShards.contains(shardId)) { + IndexShard indexShard = indicesService.getShardOrNull(shardId); + assertNotNull("No shard found for shard " + shardId, indexShard); + logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); + indexShard.close("test", randomBoolean()); + failedShards.incrementAndGet(); + } else { + successfulShards.incrementAndGet(); + } + handler.messageReceived(request, channel, task); } - handler.messageReceived(request, channel, task); - }); + ); } AnalyzeIndexDiskUsageResponse resp = client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { indexName }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); assertThat(failedShards.get(), equalTo(failingShards.size())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 40982516725b7..23963fe50aa44 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -35,7 +35,7 @@ import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.bulk.BulkAction; 
+import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActiveShardCount; @@ -1936,7 +1936,7 @@ public void accept(long globalCheckpoint, Exception e) { // delay the delivery of the replica write until the end of the test so the replica never becomes in-sync replicaNodeTransportService.addRequestHandlingBehavior( - BulkAction.NAME + "[s][r]", + TransportBulkAction.NAME + "[s][r]", (handler, request, channel, task) -> recoveryCompleteListener.addListener( assertNoFailureListener(ignored -> handler.messageReceived(request, channel, task)) ) diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index a8f26ab966646..7e03b495438d8 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -36,15 +36,12 @@ import org.elasticsearch.action.admin.cluster.node.shutdown.TransportPrevalidateNodeRemovalAction; import org.elasticsearch.action.admin.cluster.node.shutdown.TransportPrevalidateShardPathAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.node.usage.TransportNodesUsageAction; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; 
-import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; @@ -97,19 +94,16 @@ import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.close.TransportVerifyShardBeforeCloseAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.dangling.delete.TransportDeleteDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.find.TransportFindDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.import_index.TransportImportDanglingIndexAction; import org.elasticsearch.action.admin.indices.dangling.list.TransportListDanglingIndicesAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; @@ 
-128,7 +122,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.TransportVerifyShardIndexBlockAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; @@ -172,7 +165,6 @@ import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.SimulateBulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; @@ -637,8 +629,8 @@ public void reg actions.register(TransportNodesUsageAction.TYPE, TransportNodesUsageAction.class); actions.register(TransportNodesHotThreadsAction.TYPE, TransportNodesHotThreadsAction.class); actions.register(TransportListTasksAction.TYPE, TransportListTasksAction.class); - actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class); - actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class); + actions.register(TransportGetTaskAction.TYPE, TransportGetTaskAction.class); + actions.register(TransportCancelTasksAction.TYPE, TransportCancelTasksAction.class); actions.register(GetHealthAction.INSTANCE, GetHealthAction.LocalAction.class); actions.register(PrevalidateNodeRemovalAction.INSTANCE, TransportPrevalidateNodeRemovalAction.class); actions.register(HealthApiStatsAction.INSTANCE, HealthApiStatsTransportAction.class); @@ -662,7 +654,7 @@ public void 
reg actions.register(GetRepositoriesAction.INSTANCE, TransportGetRepositoriesAction.class); actions.register(TransportDeleteRepositoryAction.TYPE, TransportDeleteRepositoryAction.class); actions.register(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class); - actions.register(CleanupRepositoryAction.INSTANCE, TransportCleanupRepositoryAction.class); + actions.register(TransportCleanupRepositoryAction.TYPE, TransportCleanupRepositoryAction.class); actions.register(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class); actions.register(TransportDeleteSnapshotAction.TYPE, TransportDeleteSnapshotAction.class); actions.register(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class); @@ -678,7 +670,7 @@ public void reg actions.register(IndicesStatsAction.INSTANCE, TransportIndicesStatsAction.class); actions.register(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class); actions.register(TransportIndicesShardStoresAction.TYPE, TransportIndicesShardStoresAction.class); - actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class); + actions.register(TransportCreateIndexAction.TYPE, TransportCreateIndexAction.class); actions.register(ResizeAction.INSTANCE, TransportResizeAction.class); actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class); actions.register(LazyRolloverAction.INSTANCE, LazyRolloverAction.TransportLazyRolloverAction.class); @@ -686,7 +678,7 @@ public void reg actions.register(GetIndexAction.INSTANCE, TransportGetIndexAction.class); actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class); actions.register(TransportCloseIndexAction.TYPE, TransportCloseIndexAction.class); - actions.register(AddIndexBlockAction.INSTANCE, TransportAddIndexBlockAction.class); + actions.register(TransportAddIndexBlockAction.TYPE, TransportAddIndexBlockAction.class); actions.register(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class); 
actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class); actions.register(TransportGetFieldMappingsIndexAction.TYPE, TransportGetFieldMappingsIndexAction.class); @@ -711,7 +703,7 @@ public void reg actions.register(RefreshAction.INSTANCE, TransportRefreshAction.class); actions.register(FlushAction.INSTANCE, TransportFlushAction.class); actions.register(ForceMergeAction.INSTANCE, TransportForceMergeAction.class); - actions.register(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class); + actions.register(TransportClearIndicesCacheAction.TYPE, TransportClearIndicesCacheAction.class); actions.register(GetAliasesAction.INSTANCE, TransportGetAliasesAction.class); actions.register(GetSettingsAction.INSTANCE, TransportGetSettingsAction.class); @@ -724,7 +716,7 @@ public void reg actions.register(TransportUpdateAction.TYPE, TransportUpdateAction.class); actions.register(TransportMultiGetAction.TYPE, TransportMultiGetAction.class); actions.register(TransportShardMultiGetAction.TYPE, TransportShardMultiGetAction.class); - actions.register(BulkAction.INSTANCE, TransportBulkAction.class); + actions.register(TransportBulkAction.TYPE, TransportBulkAction.class); actions.register(SimulateBulkAction.INSTANCE, TransportSimulateBulkAction.class); actions.register(TransportShardBulkAction.TYPE, TransportShardBulkAction.class); actions.register(TransportSearchAction.TYPE, TransportSearchAction.class); @@ -740,7 +732,7 @@ public void reg actions.register(AutoCreateAction.INSTANCE, AutoCreateAction.TransportAction.class); actions.register(ResolveIndexAction.INSTANCE, ResolveIndexAction.TransportAction.class); actions.register(TransportResolveClusterAction.TYPE, TransportResolveClusterAction.class); - actions.register(AnalyzeIndexDiskUsageAction.INSTANCE, TransportAnalyzeIndexDiskUsageAction.class); + actions.register(TransportAnalyzeIndexDiskUsageAction.TYPE, TransportAnalyzeIndexDiskUsageAction.class); 
actions.register(FieldUsageStatsAction.INSTANCE, TransportFieldUsageAction.class); actions.register(MasterHistoryAction.INSTANCE, MasterHistoryAction.TransportAction.class); actions.register(CoordinationDiagnosticsAction.INSTANCE, CoordinationDiagnosticsAction.TransportAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java deleted file mode 100644 index 50fea2093da49..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.node.tasks.cancel; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; - -/** - * ActionType for cancelling running tasks - */ -public class CancelTasksAction extends ActionType { - - public static final CancelTasksAction INSTANCE = new CancelTasksAction(); - public static final String NAME = "cluster:admin/tasks/cancel"; - - private CancelTasksAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java index 5fdd50e0c9e66..39c19e225d175 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java @@ -18,7 +18,7 @@ public class CancelTasksRequestBuilder extends TasksRequestBuilder { public CancelTasksRequestBuilder(ElasticsearchClient client) { - super(client, CancelTasksAction.INSTANCE, new CancelTasksRequest()); + super(client, TransportCancelTasksAction.TYPE, new CancelTasksRequest()); } public CancelTasksRequestBuilder waitForCompletion(boolean waitForCompletion) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 1f3271be79797..d2e79bc63daf8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ResourceNotFoundException; import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -34,10 +35,14 @@ */ public class TransportCancelTasksAction extends TransportTasksAction { + public static final String NAME = "cluster:admin/tasks/cancel"; + + public static final ActionType TYPE = new ActionType<>(NAME); + @Inject public TransportCancelTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { super( - CancelTasksAction.NAME, + NAME, clusterService, transportService, actionFilters, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java deleted file mode 100644 index 21be31462ef0d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.node.tasks.get; - -import org.elasticsearch.action.ActionType; - -/** - * ActionType for retrieving a list of currently running tasks - */ -public class GetTaskAction extends ActionType { - public static final String TASKS_ORIGIN = "tasks"; - - public static final GetTaskAction INSTANCE = new GetTaskAction(); - public static final String NAME = "cluster:monitor/task/get"; - - private GetTaskAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java index 49eacd0996111..6d8d4eceed3f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskRequestBuilder.java @@ -18,7 +18,7 @@ */ public class GetTaskRequestBuilder extends ActionRequestBuilder { public GetTaskRequestBuilder(ElasticsearchClient client) { - super(client, GetTaskAction.INSTANCE, new GetTaskRequest()); + super(client, TransportGetTaskAction.TYPE, new GetTaskRequest()); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 9e0b6937257b4..c8b33e6d569d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.get.GetRequest; 
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; @@ -44,7 +45,6 @@ import java.io.IOException; import static java.util.Objects.requireNonNullElse; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.core.TimeValue.timeValueSeconds; /** @@ -59,6 +59,8 @@ */ public class TransportGetTaskAction extends HandledTransportAction { + public static final String TASKS_ORIGIN = "tasks"; + public static final ActionType TYPE = new ActionType<>("cluster:monitor/task/get"); private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30); private final ThreadPool threadPool; @@ -76,7 +78,7 @@ public TransportGetTaskAction( Client client, NamedXContentRegistry xContentRegistry ) { - super(GetTaskAction.NAME, transportService, actionFilters, GetTaskRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + super(TYPE.name(), transportService, actionFilters, GetTaskRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; @@ -120,7 +122,7 @@ private void runOnNodeWithTaskIfPossible(Task thisTask, GetTaskRequest request, GetTaskRequest nodeRequest = request.nodeRequest(clusterService.localNode().getId(), thisTask.getId()); transportService.sendRequest( node, - GetTaskAction.NAME, + TYPE.name(), nodeRequest, TransportRequestOptions.timeout(request.getTimeout()), new ActionListenerResponseHandler<>(listener, GetTaskResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java deleted file mode 100644 index d71c66fd6f3ca..0000000000000 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.action.admin.cluster.repositories.cleanup; - -import org.elasticsearch.action.ActionType; - -public final class CleanupRepositoryAction extends ActionType { - - public static final CleanupRepositoryAction INSTANCE = new CleanupRepositoryAction(); - public static final String NAME = "cluster:admin/repository/_cleanup"; - - private CleanupRepositoryAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java index 680502c783a8b..b253264f039e4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/CleanupRepositoryRequestBuilder.java @@ -16,7 +16,7 @@ public class CleanupRepositoryRequestBuilder extends MasterNodeOperationRequestB CleanupRepositoryRequestBuilder> { public CleanupRepositoryRequestBuilder(ElasticsearchClient client, String repository) { - super(client, CleanupRepositoryAction.INSTANCE, new CleanupRepositoryRequest(repository)); + super(client, TransportCleanupRepositoryAction.TYPE, new CleanupRepositoryRequest(repository)); } public CleanupRepositoryRequestBuilder setName(String repository) { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java index 2c8371b0af4f2..4892efaf5ae1f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; @@ -59,6 +60,7 @@ */ public final class TransportCleanupRepositoryAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("cluster:admin/repository/_cleanup"); private static final Logger logger = LogManager.getLogger(TransportCleanupRepositoryAction.class); private final RepositoriesService repositoriesService; @@ -73,7 +75,7 @@ public TransportCleanupRepositoryAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - CleanupRepositoryAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java deleted file mode 100644 index 74184598c6db2..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.cache.clear; - -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; - -public class ClearIndicesCacheAction extends ActionType { - - public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); - public static final String NAME = "indices:admin/cache/clear"; - - private ClearIndicesCacheAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java index fb6139c0ae4e3..43ad9bff9af8d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java @@ -18,7 +18,7 @@ public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBu ClearIndicesCacheRequestBuilder> { public ClearIndicesCacheRequestBuilder(ElasticsearchClient client) { - super(client, ClearIndicesCacheAction.INSTANCE, new ClearIndicesCacheRequest()); + super(client, TransportClearIndicesCacheAction.TYPE, new ClearIndicesCacheRequest()); } public ClearIndicesCacheRequestBuilder setQueryCache(boolean queryCache) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index faeaf0bdb575a..428fd6e083116 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; @@ -36,6 +37,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc BroadcastResponse, TransportBroadcastByNodeAction.EmptyResult> { + public static final ActionType TYPE = new ActionType<>("indices:admin/cache/clear"); private final IndicesService indicesService; @Inject @@ -47,7 +49,7 @@ public TransportClearIndicesCacheAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - ClearIndicesCacheAction.NAME, + TYPE.name(), clusterService, transportService, actionFilters, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java deleted file mode 100644 index 5560c44f3fcbe..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.create; - -import org.elasticsearch.action.ActionType; - -public class CreateIndexAction extends ActionType { - - public static final CreateIndexAction INSTANCE = new CreateIndexAction(); - public static final String NAME = "indices:admin/create"; - - private CreateIndexAction() { - super(NAME); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 307cafbb9b8e1..4e265f4052e72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -29,11 +29,11 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder< CreateIndexRequestBuilder> { public CreateIndexRequestBuilder(ElasticsearchClient client) { - super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest()); + super(client, TransportCreateIndexAction.TYPE, new CreateIndexRequest()); } public CreateIndexRequestBuilder(ElasticsearchClient client, String index) { - super(client, CreateIndexAction.INSTANCE, new CreateIndexRequest(index)); + super(client, TransportCreateIndexAction.TYPE, new CreateIndexRequest(index)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index c03cba9b40a33..72f4c4676cf1d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; @@ -43,6 +44,7 @@ * Create index action. */ public class TransportCreateIndexAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("indices:admin/create"); private static final Logger logger = LogManager.getLogger(TransportCreateIndexAction.class); private final MetadataCreateIndexService createIndexService; @@ -59,7 +61,7 @@ public TransportCreateIndexAction( SystemIndices systemIndices ) { super( - CreateIndexAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java deleted file mode 100644 index 710bf5077b73d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageAction.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.diskusage; - -import org.elasticsearch.action.ActionType; - -public class AnalyzeIndexDiskUsageAction extends ActionType { - public static final AnalyzeIndexDiskUsageAction INSTANCE = new AnalyzeIndexDiskUsageAction(); - public static final String NAME = "indices:admin/analyze_disk_usage"; - - public AnalyzeIndexDiskUsageAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java index 08bc4469deaa1..119bb8d9dec61 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/AnalyzeIndexDiskUsageRequest.java @@ -58,7 +58,7 @@ public void setParentTask(String parentTaskNode, long parentTaskId) { @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new CancellableTask(id, AnalyzeIndexDiskUsageAction.NAME, type, "", parentTaskId, headers) { + return new CancellableTask(id, TransportAnalyzeIndexDiskUsageAction.TYPE.name(), type, "", parentTaskId, headers) { @Override public String getDescription() { return AnalyzeIndexDiskUsageRequest.this.getDescription(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java index 69e694447bccd..8380edb4cb6ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/TransportAnalyzeIndexDiskUsageAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.ExceptionsHelper; import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; @@ -51,6 +52,7 @@ public class TransportAnalyzeIndexDiskUsageAction extends TransportBroadcastActi AnalyzeIndexDiskUsageResponse, AnalyzeDiskUsageShardRequest, AnalyzeDiskUsageShardResponse> { + public static final ActionType TYPE = new ActionType<>("indices:admin/analyze_disk_usage"); private final IndicesService indicesService; private final ThreadPool threadPool; @@ -63,7 +65,7 @@ public TransportAnalyzeIndexDiskUsageAction( IndexNameExpressionResolver indexNameExpressionResolver ) { super( - AnalyzeIndexDiskUsageAction.NAME, + TYPE.name(), clusterService, transportService, actionFilters, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java deleted file mode 100644 index 460be3cf10c1c..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockAction.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.readonly; - -import org.elasticsearch.action.ActionType; - -public class AddIndexBlockAction extends ActionType { - - public static final AddIndexBlockAction INSTANCE = new AddIndexBlockAction(); - public static final String NAME = "indices:admin/block/add"; - - private AddIndexBlockAction() { - super(NAME); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java index 5b16e6889ad22..4bcff8d1f6b16 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequestBuilder.java @@ -22,7 +22,7 @@ public class AddIndexBlockRequestBuilder extends AcknowledgedRequestBuilder< AddIndexBlockRequestBuilder> { public AddIndexBlockRequestBuilder(ElasticsearchClient client, APIBlock block, String... 
indices) { - super(client, AddIndexBlockAction.INSTANCE, new AddIndexBlockRequest(block, indices)); + super(client, TransportAddIndexBlockAction.TYPE, new AddIndexBlockRequest(block, indices)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java index 731257ddabbad..ab2549bf9de67 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportAddIndexBlockAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -40,6 +41,7 @@ */ public class TransportAddIndexBlockAction extends TransportMasterNodeAction { + public static final ActionType TYPE = new ActionType<>("indices:admin/block/add"); private static final Logger logger = LogManager.getLogger(TransportAddIndexBlockAction.class); private final MetadataIndexStateService indexStateService; @@ -56,7 +58,7 @@ public TransportAddIndexBlockAction( DestructiveOperations destructiveOperations ) { super( - AddIndexBlockAction.NAME, + TYPE.name(), transportService, clusterService, threadPool, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java index ac590d1a4d826..31e9f959f0fe7 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/TransportVerifyShardIndexBlockAction.java @@ -44,8 +44,7 @@ public class TransportVerifyShardIndexBlockAction extends TransportReplicationAc TransportVerifyShardIndexBlockAction.ShardRequest, ReplicationResponse> { - public static final String NAME = AddIndexBlockAction.NAME + "[s]"; - public static final ActionType TYPE = new ActionType<>(NAME); + public static final ActionType TYPE = new ActionType<>(TransportAddIndexBlockAction.TYPE.name() + "[s]"); @Inject public TransportVerifyShardIndexBlockAction( @@ -59,7 +58,7 @@ public TransportVerifyShardIndexBlockAction( ) { super( settings, - NAME, + TYPE.name(), transportService, clusterService, indicesService, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java deleted file mode 100644 index bc72e039e6ded..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.bulk; - -import org.elasticsearch.action.ActionType; - -public class BulkAction extends ActionType { - - public static final BulkAction INSTANCE = new BulkAction(); - public static final String NAME = "indices:data/write/bulk"; - - private BulkAction() { - super(NAME); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index 2e2938b63334e..6a90c46fc7fab 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -57,7 +57,7 @@ public class BulkRequestBuilder extends ActionRequestLazyBuilder { + public static final String NAME = "indices:data/write/bulk"; + public static final ActionType TYPE = new ActionType<>(NAME); private static final Logger logger = LogManager.getLogger(TransportBulkAction.class); public static final String LAZY_ROLLOVER_ORIGIN = "lazy_rollover"; @@ -141,7 +143,7 @@ public TransportBulkAction( LongSupplier relativeTimeProvider ) { this( - BulkAction.INSTANCE, + TYPE, BulkRequest::new, threadPool, transportService, diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 4cf10b3c27824..265719b4738c0 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -78,7 +78,7 @@ /** Performs shard-level bulk (index, delete or update) operations */ public class TransportShardBulkAction extends TransportWriteAction { - public static final String ACTION_NAME = BulkAction.NAME + "[s]"; + public static final String ACTION_NAME = TransportBulkAction.NAME + "[s]"; public static final ActionType TYPE = new ActionType<>(ACTION_NAME); private 
static final Logger logger = LogManager.getLogger(TransportShardBulkAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index d0ae8d1ccb3f1..66c395cf51d96 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.OriginSettingClient; @@ -656,6 +656,6 @@ public void cancelSearchTask(SearchTask task, String reason) { CancelTasksRequest req = new CancelTasksRequest().setTargetTaskId(new TaskId(client.getLocalNodeId(), task.getId())) .setReason("Fatal failure during search: " + reason); // force the origin to execute the cancellation as a system user - new OriginSettingClient(client, GetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.noop()); + new OriginSettingClient(client, TransportGetTaskAction.TASKS_ORIGIN).admin().cluster().cancelTasks(req, ActionListener.noop()); } } diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index c6d9c3a8f3563..76073696b0b27 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ 
-32,13 +32,13 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -46,10 +46,10 @@ import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest; import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.action.admin.cluster.node.usage.TransportNodesUsageAction; -import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; +import 
org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; @@ -125,17 +125,17 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; +import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; @@ -164,10 +164,10 @@ import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import 
org.elasticsearch.action.admin.indices.open.OpenIndexRequestBuilder; import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequestBuilder; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; import org.elasticsearch.action.admin.indices.recovery.RecoveryRequestBuilder; @@ -212,10 +212,10 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.delete.DeleteResponse; @@ -428,12 +428,12 @@ public DeleteRequestBuilder prepareDelete(String index, String id) { @Override public ActionFuture bulk(final BulkRequest request) { - return execute(BulkAction.INSTANCE, request); + return execute(TransportBulkAction.TYPE, request); } @Override public void bulk(final BulkRequest request, final ActionListener listener) { - execute(BulkAction.INSTANCE, request, listener); + execute(TransportBulkAction.TYPE, request, listener); } @Override @@ -782,12 +782,12 @@ public ListTasksRequestBuilder 
prepareListTasks(String... nodesIds) { @Override public ActionFuture getTask(final GetTaskRequest request) { - return execute(GetTaskAction.INSTANCE, request); + return execute(TransportGetTaskAction.TYPE, request); } @Override public void getTask(final GetTaskRequest request, final ActionListener listener) { - execute(GetTaskAction.INSTANCE, request, listener); + execute(TransportGetTaskAction.TYPE, request, listener); } @Override @@ -802,12 +802,12 @@ public GetTaskRequestBuilder prepareGetTask(TaskId taskId) { @Override public ActionFuture cancelTasks(CancelTasksRequest request) { - return execute(CancelTasksAction.INSTANCE, request); + return execute(TransportCancelTasksAction.TYPE, request); } @Override public void cancelTasks(CancelTasksRequest request, ActionListener listener) { - execute(CancelTasksAction.INSTANCE, request, listener); + execute(TransportCancelTasksAction.TYPE, request, listener); } @Override @@ -917,7 +917,7 @@ public CleanupRepositoryRequestBuilder prepareCleanupRepository(String repositor @Override public void cleanupRepository(CleanupRepositoryRequest request, ActionListener listener) { - execute(CleanupRepositoryAction.INSTANCE, request, listener); + execute(TransportCleanupRepositoryAction.TYPE, request, listener); } @Override @@ -1114,7 +1114,7 @@ public GetAliasesRequestBuilder prepareGetAliases(String... 
aliases) { @Override public ActionFuture clearCache(final ClearIndicesCacheRequest request) { - return execute(ClearIndicesCacheAction.INSTANCE, request); + return execute(TransportClearIndicesCacheAction.TYPE, request); } @Override @@ -1134,7 +1134,7 @@ public GetIndexRequestBuilder prepareGetIndex() { @Override public void clearCache(final ClearIndicesCacheRequest request, final ActionListener listener) { - execute(ClearIndicesCacheAction.INSTANCE, request, listener); + execute(TransportClearIndicesCacheAction.TYPE, request, listener); } @Override @@ -1144,12 +1144,12 @@ public ClearIndicesCacheRequestBuilder prepareClearCache(String... indices) { @Override public ActionFuture create(final CreateIndexRequest request) { - return execute(CreateIndexAction.INSTANCE, request); + return execute(TransportCreateIndexAction.TYPE, request); } @Override public void create(final CreateIndexRequest request, final ActionListener listener) { - execute(CreateIndexAction.INSTANCE, request, listener); + execute(TransportCreateIndexAction.TYPE, request, listener); } @Override @@ -1204,7 +1204,7 @@ public AddIndexBlockRequestBuilder prepareAddBlock(APIBlock block, String... 
ind @Override public void addBlock(AddIndexBlockRequest request, ActionListener listener) { - execute(AddIndexBlockAction.INSTANCE, request, listener); + execute(TransportAddIndexBlockAction.TYPE, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java b/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java index 3c1aa54bc7056..6406b7b2b93c3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestCancellableNodeClient.java @@ -30,7 +30,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; /** * A {@linkplain Client} that cancels tasks executed locally when the provided {@link HttpChannel} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java index c139675a1ded3..e1732f267a814 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeIndexDiskUsageAction.java @@ -8,8 +8,8 @@ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; +import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import 
org.elasticsearch.common.Strings; @@ -54,7 +54,7 @@ public BaseRestHandler.RestChannelConsumer prepareRequest(final RestRequest requ final AnalyzeIndexDiskUsageRequest analyzeRequest = new AnalyzeIndexDiskUsageRequest(indices, indicesOptions, flush); return channel -> { final RestCancellableNodeClient cancelClient = new RestCancellableNodeClient(client, request.getHttpChannel()); - cancelClient.execute(AnalyzeIndexDiskUsageAction.INSTANCE, analyzeRequest, new RestToXContentListener<>(channel)); + cancelClient.execute(TransportAnalyzeIndexDiskUsageAction.TYPE, analyzeRequest, new RestToXContentListener<>(channel)); }; } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java index a72ef9e83ccf2..8632cd6cfea77 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskResultsService.java @@ -35,7 +35,7 @@ import java.util.Iterator; import java.util.Map; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.core.TimeValue.timeValueMillis; import static org.elasticsearch.tasks.TaskInfo.INCLUDE_CANCELLED_PARAM; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index adefd71f93590..22953f9959c1d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.TransportVersion; import 
org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -555,7 +555,7 @@ public void testNonExistingTaskCancellation() throws Exception { // Make sure that main task is no longer running ListTasksResponse listTasksResponse = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction, - new ListTasksRequest().setActions(CancelTasksAction.NAME + "*") + new ListTasksRequest().setActions(TransportCancelTasksAction.NAME + "*") ); assertEquals(0, listTasksResponse.getTasks().size()); }); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index d714105d9a13a..8b8d4e52d33d4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -61,7 +61,7 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.test.ESTestCase.waitUntil; /** diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 6815d634292a4..b97e8303a8eb5 
100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -442,7 +442,7 @@ public void testIngestForward() throws Exception { verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); boolean usedNode1 = node.getValue() == remoteNode1; // make sure we used one of the nodes if (usedNode1 == false) { assertSame(remoteNode2, node.getValue()); @@ -457,7 +457,7 @@ public void testIngestForward() throws Exception { // now make sure ingest nodes are rotated through with a subsequent request reset(transportService); ActionTestUtils.execute(action, null, bulkRequest, listener); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); if (usedNode1) { assertSame(remoteNode2, node.getValue()); } else { @@ -482,7 +482,7 @@ public void testSingleItemBulkActionIngestForward() throws Exception { verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), 
eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); boolean usedNode1 = node.getValue() == remoteNode1; // make sure we used one of the nodes if (usedNode1 == false) { assertSame(remoteNode2, node.getValue()); @@ -500,7 +500,7 @@ public void testSingleItemBulkActionIngestForward() throws Exception { // now make sure ingest nodes are rotated through with a subsequent request reset(transportService); ActionTestUtils.execute(singleItemBulkWriteAction, null, indexRequest, listener); - verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); + verify(transportService).sendRequest(node.capture(), eq(TransportBulkAction.NAME), any(), remoteResponseHandler.capture()); if (usedNode1) { assertSame(remoteNode2, node.getValue()); } else { diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java index 97c52ef2edc37..0a490898b7fa7 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java @@ -15,8 +15,8 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import 
org.elasticsearch.action.delete.TransportDeleteAction; @@ -60,9 +60,9 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { ClusterRerouteAction.INSTANCE, // indices admin actions - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, IndicesStatsAction.INSTANCE, - ClearIndicesCacheAction.INSTANCE, + TransportClearIndicesCacheAction.TYPE, FlushAction.INSTANCE }; protected ThreadPool threadPool; @@ -118,12 +118,15 @@ public void testActions() { client.admin().cluster().prepareReroute().execute(new AssertingActionListener<>(ClusterRerouteAction.NAME, client.threadPool())); // choosing arbitrary indices admin actions to test - client.admin().indices().prepareCreate("idx").execute(new AssertingActionListener<>(CreateIndexAction.NAME, client.threadPool())); + client.admin() + .indices() + .prepareCreate("idx") + .execute(new AssertingActionListener<>(TransportCreateIndexAction.TYPE.name(), client.threadPool())); client.admin().indices().prepareStats().execute(new AssertingActionListener<>(IndicesStatsAction.NAME, client.threadPool())); client.admin() .indices() .prepareClearCache("idx1", "idx2") - .execute(new AssertingActionListener<>(ClearIndicesCacheAction.NAME, client.threadPool())); + .execute(new AssertingActionListener<>(TransportClearIndicesCacheAction.TYPE.name(), client.threadPool())); client.admin().indices().prepareFlush().execute(new AssertingActionListener<>(FlushAction.NAME, client.threadPool())); } @@ -144,7 +147,7 @@ public void testOverrideHeader() throws Exception { client.admin() .indices() .prepareCreate("idx") - .execute(new AssertingActionListener<>(CreateIndexAction.NAME, expected, client.threadPool())); + .execute(new AssertingActionListener<>(TransportCreateIndexAction.TYPE.name(), expected, client.threadPool())); } protected static void assertHeaders(Map headers, Map expected) { diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java 
b/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java index 3f425ac202b6c..a1ad29ef2ddd6 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestCancellableNodeClientTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; @@ -165,7 +165,7 @@ public Task exe ActionListener listener ) { switch (action.name()) { - case CancelTasksAction.NAME -> { + case TransportCancelTasksAction.NAME -> { CancelTasksRequest cancelTasksRequest = (CancelTasksRequest) request; assertTrue("tried to cancel the same task more than once", cancelledTasks.add(cancelTasksRequest.getTargetTaskId())); Task task = request.createTask(counter.getAndIncrement(), "cancel_task", action.name(), null, Collections.emptyMap()); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 0a53db94b9aaf..dafe994b502f0 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RequestValidators; -import 
org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; @@ -39,7 +38,6 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; @@ -48,7 +46,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.TransportBulkAction; @@ -2324,7 +2321,7 @@ protected void assertSnapshotOrGenericThread() { new IndexSettingProviders(Set.of()) ); actions.put( - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, new TransportCreateIndexAction( transportService, clusterService, @@ -2339,7 +2336,7 @@ protected void assertSnapshotOrGenericThread() { final IndexingPressure indexingMemoryLimits = new IndexingPressure(settings); mappingUpdatedAction.setClient(client); actions.put( - BulkAction.INSTANCE, + TransportBulkAction.TYPE, new TransportBulkAction( threadPool, transportService, @@ -2477,7 +2474,7 @@ 
protected void assertSnapshotOrGenericThread() { ) ); actions.put( - CleanupRepositoryAction.INSTANCE, + TransportCleanupRepositoryAction.TYPE, new TransportCleanupRepositoryAction( transportService, clusterService, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java index e5bffdd5f4c11..208b6bb1b4fd0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java @@ -8,8 +8,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -38,7 +38,7 @@ public void performAction( getClient().admin() .indices() .execute( - AddIndexBlockAction.INSTANCE, + TransportAddIndexBlockAction.TYPE, new AddIndexBlockRequest(WRITE, indexMetadata.getIndex().getName()).masterNodeTimeout(TimeValue.MAX_VALUE), listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index f87123d48ccea..47e4a6913897b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -9,7 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; @@ -311,7 +311,10 @@ public class ClusterPrivilegeResolver { Set.of("cluster:admin/fleet/secrets/post", "cluster:admin/fleet/secrets/delete") ); - public static final NamedClusterPrivilege CANCEL_TASK = new ActionClusterPrivilege("cancel_task", Set.of(CancelTasksAction.NAME + "*")); + public static final NamedClusterPrivilege CANCEL_TASK = new ActionClusterPrivilege( + "cancel_task", + Set.of(TransportCancelTasksAction.NAME + "*") + ); public static final NamedClusterPrivilege MANAGE_SEARCH_APPLICATION = new ActionClusterPrivilege( "manage_search_application", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index ba00864148c24..f373453f779d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import 
org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; @@ -119,7 +119,7 @@ public final class IndexPrivilege extends Privilege { ) ); private static final Automaton CREATE_INDEX_AUTOMATON = patterns( - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), AutoCreateAction.NAME, CreateDataStreamAction.NAME ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 43863d1b203d1..23431e184422a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.admin.indices.analyze.TransportReloadAnalyzersAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; @@ -149,7 +149,7 @@ public class InternalUsers { IndicesStatsAction.NAME + "*", TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ) .allowRestrictedIndices(false) .build(), @@ -168,7 +168,7 @@ public class InternalUsers { IndicesStatsAction.NAME + "*", 
TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ) .allowRestrictedIndices(true) .build() }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java index 3879a8df0fbe6..1f28afbbc75b7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditorTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -188,7 +188,7 @@ public void testAuditingBeforeTemplateInstalled() throws Exception { // the back log will be written some point later ArgumentCaptor bulkCaptor = ArgumentCaptor.forClass(BulkRequest.class); - assertBusy(() -> verify(client, times(1)).execute(eq(BulkAction.INSTANCE), bulkCaptor.capture(), any())); + assertBusy(() -> verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), bulkCaptor.capture(), any())); BulkRequest bulkRequest = bulkCaptor.getValue(); assertThat(bulkRequest.numberOfActions(), equalTo(3)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index 46c393d9f0de2..91cf339e46018 
100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -310,7 +310,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = fromRole.authorize( - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_index1"), md.getIndicesLookup(), fieldPermissionsCache @@ -350,7 +350,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = limitedByRole.authorize( - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), md.getIndicesLookup(), fieldPermissionsCache @@ -390,7 +390,7 @@ public void testAuthorize() { assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); iac = role.authorize( - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_index1"), md.getIndicesLookup(), fieldPermissionsCache @@ -446,12 +446,12 @@ public void 
testCheckClusterAction() { public void testCheckIndicesAction() { Role fromRole = Role.builder(EMPTY_RESTRICTED_INDICES, "a-role").add(IndexPrivilege.READ, "ind-1").build(); assertThat(fromRole.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true)); - assertThat(fromRole.checkIndicesAction(CreateIndexAction.NAME), is(false)); + assertThat(fromRole.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false)); { Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limited-role").add(IndexPrivilege.ALL, "ind-1").build(); assertThat(limitedByRole.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true)); - assertThat(limitedByRole.checkIndicesAction(CreateIndexAction.NAME), is(true)); + assertThat(limitedByRole.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(true)); Role role; if (randomBoolean()) { role = limitedByRole.limitedBy(fromRole); @@ -459,7 +459,7 @@ public void testCheckIndicesAction() { role = fromRole.limitedBy(limitedByRole); } assertThat(role.checkIndicesAction(TransportSearchAction.TYPE.name()), is(true)); - assertThat(role.checkIndicesAction(CreateIndexAction.NAME), is(false)); + assertThat(role.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false)); } { Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limited-role").add(IndexPrivilege.NONE, "ind-1").build(); @@ -471,7 +471,7 @@ public void testCheckIndicesAction() { role = fromRole.limitedBy(limitedByRole); } assertThat(role.checkIndicesAction(TransportSearchAction.TYPE.name()), is(false)); - assertThat(role.checkIndicesAction(CreateIndexAction.NAME), is(false)); + assertThat(role.checkIndicesAction(TransportCreateIndexAction.TYPE.name()), is(false)); } } @@ -564,13 +564,13 @@ public void testAllowedActionsMatcher() { Automaton fromRoleAutomaton = fromRole.allowedActionsMatcher("index1"); Predicate fromRolePredicate = Automatons.predicate(fromRoleAutomaton); 
assertThat(fromRolePredicate.test(TransportSearchAction.TYPE.name()), is(true)); - assertThat(fromRolePredicate.test(BulkAction.NAME), is(true)); + assertThat(fromRolePredicate.test(TransportBulkAction.NAME), is(true)); Role limitedByRole = Role.builder(EMPTY_RESTRICTED_INDICES, "limitedRole").add(IndexPrivilege.READ, "index1", "index2").build(); Automaton limitedByRoleAutomaton = limitedByRole.allowedActionsMatcher("index1"); Predicate limitedByRolePredicated = Automatons.predicate(limitedByRoleAutomaton); assertThat(limitedByRolePredicated.test(TransportSearchAction.TYPE.name()), is(true)); - assertThat(limitedByRolePredicated.test(BulkAction.NAME), is(false)); + assertThat(limitedByRolePredicated.test(TransportBulkAction.NAME), is(false)); Role role; if (randomBoolean()) { role = limitedByRole.limitedBy(fromRole); @@ -581,17 +581,17 @@ public void testAllowedActionsMatcher() { Automaton roleAutomaton = role.allowedActionsMatcher("index1"); Predicate rolePredicate = Automatons.predicate(roleAutomaton); assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(true)); - assertThat(rolePredicate.test(BulkAction.NAME), is(false)); + assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false)); roleAutomaton = role.allowedActionsMatcher("index2"); rolePredicate = Automatons.predicate(roleAutomaton); assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(true)); - assertThat(rolePredicate.test(BulkAction.NAME), is(false)); + assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false)); roleAutomaton = role.allowedActionsMatcher("other"); rolePredicate = Automatons.predicate(roleAutomaton); assertThat(rolePredicate.test(TransportSearchAction.TYPE.name()), is(false)); - assertThat(rolePredicate.test(BulkAction.NAME), is(false)); + assertThat(rolePredicate.test(TransportBulkAction.NAME), is(false)); } public void testCheckClusterPrivilege() { diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 21827c4b9a373..aa9bb1dd579bf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -8,7 +8,7 @@ import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -531,8 +531,8 @@ public void testIngestPipelinePrivileges() { } public void testCancelTasksPrivilege() { - verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, CancelTasksAction.NAME); - verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, CancelTasksAction.NAME + "[n]"); + verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, TransportCancelTasksAction.NAME); + verifyClusterActionAllowed(ClusterPrivilegeResolver.CANCEL_TASK, TransportCancelTasksAction.NAME + "[n]"); verifyClusterActionDenied(ClusterPrivilegeResolver.CANCEL_TASK, "cluster:admin/whatever"); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 4ff250c3a68b3..b0d25949947e3 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; @@ -40,7 +40,7 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.datastreams.GetDataStreamAction; @@ -622,7 +622,10 @@ public void testKibanaSystemRole() { kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -656,7 +659,10 @@ public void testKibanaSystemRole() { kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -690,7 +696,10 @@ public void testKibanaSystemRole() { kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( @@ -721,7 +730,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); - 
assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -753,7 +765,10 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -776,7 +791,10 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); - 
assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -820,7 +838,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -845,7 +866,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -874,7 +898,7 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(dotFleetSecretsIndex), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(dotFleetSecretsIndex), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(dotFleetSecretsIndex), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetSecretsIndex), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetSecretsIndex), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetSecretsIndex), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(false)); @@ -897,7 +921,10 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -924,7 +951,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -948,7 +978,7 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -973,7 +1003,10 @@ public void testKibanaSystemRole() { is(false) ); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -998,7 +1031,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1025,7 +1061,10 @@ public void testKibanaSystemRole() { kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat( @@ -1079,7 +1118,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1104,7 +1146,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1127,7 +1172,10 @@ public void testKibanaSystemRole() { 
kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( @@ -1205,7 +1253,10 @@ public void testKibanaSystemRole() { final boolean isAlsoAutoCreateIndex = indexName.startsWith(".logs-endpoint.actions-") || indexName.startsWith(".logs-endpoint.action.responses-"); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(isAlsoAutoCreateIndex)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(false)); assertThat( @@ -1289,9 +1340,9 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); + 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1330,9 +1381,9 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), 
is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); @@ -1364,7 +1415,10 @@ public void testKibanaSystemRole() { // Allow read-only assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat( @@ -1397,9 +1451,9 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true)); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true)); @@ -1421,7 +1475,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1445,7 +1502,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1475,9 +1535,9 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), 
is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true)); @@ -1511,7 +1571,10 @@ public void testKibanaSystemRole() { is(false) ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), + is(false) + ); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1562,9 +1625,9 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); 
assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(indexAbstraction), is(true)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(indexAbstraction), is(true)); // Allow create and delete index, modifying aliases, and updating index settings - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAliasesAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndicesAliasesAction.NAME).test(indexAbstraction), is(true)); @@ -1586,7 +1649,7 @@ public void testKibanaSystemRole() { Arrays.asList(".asset-criticality.asset-criticality-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach(indexName -> { final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); - assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertViewIndexMetadata(kibanaRole, indexName); }); @@ -1807,7 +1870,10 @@ public void testMonitoringUserRole() { 
monitoringUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(monitoringUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + monitoringUserRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat( monitoringUserRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false) @@ -1962,7 +2028,9 @@ public void testRemoteMonitoringAgentRole() { is(true) ); assertThat( - remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(monitoringIndex)), + remoteMonitoringAgentRole.indices() + .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()) + .test(mockIndexAbstraction(monitoringIndex)), is(true) ); assertThat( @@ -2016,7 +2084,9 @@ public void testRemoteMonitoringAgentRole() { is(false) ); assertThat( - remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(metricbeatIndex)), + remoteMonitoringAgentRole.indices() + .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()) + .test(mockIndexAbstraction(metricbeatIndex)), is(true) ); assertThat( @@ -2171,7 +2241,9 @@ public void testRemoteMonitoringCollectorRole() { is(false) ); assertThat( - remoteMonitoringCollectorRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), + remoteMonitoringCollectorRole.indices() + .allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()) + .test(mockIndexAbstraction(index)), is(false) ); assertThat( @@ -2424,7 +2496,10 @@ public void testReportingUserRole() { reportingUserRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - 
assertThat(reportingUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + reportingUserRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat( reportingUserRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) @@ -2449,7 +2524,10 @@ public void testReportingUserRole() { reportingUserRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false) ); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + reportingUserRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), + is(false) + ); assertNoAccessAllowed(reportingUserRole, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES); assertNoAccessAllowed(reportingUserRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); @@ -2667,7 +2745,10 @@ public void testBeatsAdminRole() { beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true) ); - assertThat(beatsAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + beatsAdminRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(beatsAdminRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( @@ -2727,13 +2808,16 @@ public void testBeatsSystemRole() { 
beatsSystemRole.indices().allowedIndicesMatcher("indices:foo").test(mockIndexAbstraction(randomAlphaOfLengthBetween(8, 24))), is(false) ); - assertThat(beatsSystemRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + beatsSystemRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(beatsSystemRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( beatsSystemRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false) ); - assertThat(beatsSystemRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat(beatsSystemRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertNoAccessAllowed(beatsSystemRole, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES); assertNoAccessAllowed(beatsSystemRole, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); @@ -2772,13 +2856,16 @@ public void testAPMSystemRole() { final String index = ".monitoring-beats-" + randomIntBetween(10, 15); logger.info("APM beats monitoring index name [{}]", index); - assertThat(APMSystemRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + APMSystemRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat( APMSystemRole.indices().allowedIndicesMatcher("indices:data/write/index:op_type/create").test(mockIndexAbstraction(index)), is(true) ); assertThat(APMSystemRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); - 
assertThat(APMSystemRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat(APMSystemRole.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( APMSystemRole.indices().allowedIndicesMatcher("indices:data/write/index:op_type/index").test(mockIndexAbstraction(index)), @@ -3599,7 +3686,10 @@ private void assertAllIndicesAccessAllowed(Role role, String index) { is(true) ); assertThat(role.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat( @@ -3626,7 +3716,7 @@ private void assertReadWriteDocsAndMaintenanceButNotDeleteIndexAllowed(Role role assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher("indices:admin/refresh*").test(mockIndexAbstraction(index)), is(true)); 
assertThat(role.indices().allowedIndicesMatcher("indices:admin/flush*").test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher("indices:admin/synced_flush").test(mockIndexAbstraction(index)), is(true)); @@ -3643,7 +3733,7 @@ private void assertReadWriteDocsButNotDeleteIndexAllowed(Role role, String index assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(true)); } private void assertOnlyReadAllowed(Role role, String index) { @@ -3651,7 +3741,10 @@ private void assertOnlyReadAllowed(Role role, String index) { role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat( role.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) @@ -3661,7 +3754,7 @@ private void assertOnlyReadAllowed(Role role, String index) { assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(false)); 
assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertNoAccessAllowed(role, TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES); assertNoAccessAllowed(role, XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2)); @@ -3696,7 +3789,10 @@ private void assertNoAccessAllowed(Role role, String index) { role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) ); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat( + role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(false) + ); assertThat( role.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(mockIndexAbstraction(index)), is(false) @@ -3706,7 +3802,7 @@ private void assertNoAccessAllowed(Role role, String index) { assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportUpdateAction.NAME).test(mockIndexAbstraction(index)), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(mockIndexAbstraction(index)), is(false)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(mockIndexAbstraction(index)), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(mockIndexAbstraction(index)), is(false)); } public void testLogstashAdminRole() { @@ -3761,7 +3857,10 @@ public void testLogstashAdminRole() { 
logstashAdminRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), is(true) ); - assertThat(logstashAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(mockIndexAbstraction(index)), is(true)); + assertThat( + logstashAdminRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(mockIndexAbstraction(index)), + is(true) + ); assertThat( logstashAdminRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(mockIndexAbstraction(index)), is(true) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java index 31642cbf5e34f..3878977df9359 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java @@ -10,22 +10,22 @@ import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; -import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryAction; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; +import org.elasticsearch.action.admin.cluster.repositories.cleanup.TransportCleanupRepositoryAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; -import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; +import org.elasticsearch.action.admin.indices.readonly.TransportAddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportUnpromotableShardRefreshAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.cluster.metadata.DataStream; @@ -82,15 +82,15 @@ public void testXPackUser() { PutComponentTemplateAction.NAME, TransportDeleteStoredScriptAction.TYPE.name(), UpdateJobAction.NAME, - CleanupRepositoryAction.NAME + TransportCleanupRepositoryAction.TYPE.name() ); checkClusterAccess(InternalUsers.XPACK_USER, role, randomFrom(sampleClusterActions), true); final List sampleIndexActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -117,15 +117,15 @@ public void testXPackSecurityUser() { PutComponentTemplateAction.NAME, TransportDeleteStoredScriptAction.TYPE.name(), UpdateJobAction.NAME, - CleanupRepositoryAction.NAME + TransportCleanupRepositoryAction.TYPE.name() ); checkClusterAccess(InternalUsers.XPACK_SECURITY_USER, role, 
randomFrom(sampleClusterActions), true); final List sampleIndexActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -150,9 +150,9 @@ public void testSecurityProfileUser() { final List sampleAllowedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -176,14 +176,14 @@ public void testAsyncSearchUser() { assertThat(role.application(), is(ApplicationPermission.NONE)); assertThat(role.remoteIndices(), is(RemoteIndicesPermission.NONE)); - checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, CancelTasksAction.NAME, true); + checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, TransportCancelTasksAction.NAME, true); checkClusterAccess(InternalUsers.ASYNC_SEARCH_USER, role, ClusterStateAction.NAME, false); final List sampleAllowedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, RefreshAction.NAME, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -214,7 +214,7 @@ public void testStorageUser() { final List sampleDeniedActions = List.of( TransportGetAction.TYPE.name(), - BulkAction.NAME, + TransportBulkAction.NAME, TransportPutMappingAction.TYPE.name(), TransportDeleteIndexAction.TYPE.name() ); @@ -251,7 +251,7 @@ public void testDataStreamLifecycleUser() { IndicesStatsAction.NAME, TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ); final List sampleSystemDataStreamActions = List.of( @@ -261,7 +261,7 @@ 
public void testDataStreamLifecycleUser() { IndicesStatsAction.NAME, TransportUpdateSettingsAction.TYPE.name(), DownsampleAction.NAME, - AddIndexBlockAction.NAME + TransportAddIndexBlockAction.TYPE.name() ); final String dataStream = randomAlphaOfLengthBetween(3, 12); checkIndexAccess(role, randomFrom(sampleIndexActions), dataStream, true); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java index ac5f7f2baf43e..4081d7108b0e7 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java @@ -6,9 +6,9 @@ */ package org.elasticsearch.xpack.enrich; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -301,7 +301,7 @@ public void testAsyncTaskExecute() throws Exception { assertThat(executeResponse.getTaskId(), is(not(nullValue()))); GetTaskRequest getPolicyTaskRequest = new GetTaskRequest().setTaskId(executeResponse.getTaskId()).setWaitForCompletion(true); assertBusy(() -> { - GetTaskResponse taskResponse = client().execute(GetTaskAction.INSTANCE, getPolicyTaskRequest).actionGet(); + GetTaskResponse taskResponse = client().execute(TransportGetTaskAction.TYPE, getPolicyTaskRequest).actionGet(); assertThat( ((ExecuteEnrichPolicyStatus) taskResponse.getTask().getTask().status()).getPhase(), is(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE) diff --git 
a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java index 457da7f65294b..9f0b18679666b 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyExecutorTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; @@ -229,7 +229,7 @@ protected void Thread.currentThread().interrupt(); } - if (GetTaskAction.INSTANCE.equals(action)) { + if (TransportGetTaskAction.TYPE.equals(action)) { if (shouldGetTaskApiReturnTimeout.get() == false) { // This is the second call to the Get Task API, so count down the latch to let the main test logic know. 
secondGetTaskWasCalled.countDown(); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index 13e1df133f00b..8ce1e7f350ccb 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; @@ -2073,7 +2073,7 @@ public void testRunnerCancel() throws Exception { ActionType randomActionType = randomFrom( EnrichReindexAction.INSTANCE, GetIndexAction.INSTANCE, - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, ForceMergeAction.INSTANCE, RefreshAction.INSTANCE, IndicesSegmentsAction.INSTANCE, diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 8d7cbc5cd41be..bc4708cc19c1f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ 
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -7,8 +7,8 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; @@ -172,7 +172,7 @@ public void testCancel() throws Exception { rootTasks.addAll(tasks); }); var cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTasks.get(0).taskId()).setReason("proxy timeout"); - client().execute(CancelTasksAction.INSTANCE, cancelRequest); + client().execute(TransportCancelTasksAction.TYPE, cancelRequest); assertBusy(() -> { List drivers = client(REMOTE_CLUSTER).admin() .cluster() diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 23fa3f862a3ff..92987db865ac7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -10,8 +10,8 @@ import org.apache.lucene.search.DocIdSetIterator; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.support.PlainActionFuture; 
import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.collect.Iterators; @@ -231,12 +231,12 @@ private void cancelTask(TaskId taskId) { CancelTasksRequest request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(false); LOGGER.debug("--> cancelling task [{}] without waiting for completion", taskId); - client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); + client().admin().cluster().execute(TransportCancelTasksAction.TYPE, request).actionGet(); scriptPermits.release(numberOfDocs()); request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(true); LOGGER.debug("--> cancelling task [{}] with waiting for completion", taskId); - client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); + client().admin().cluster().execute(TransportCancelTasksAction.TYPE, request).actionGet(); } /** diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index 8675c27325b4b..cbdda089e8328 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -13,13 +13,13 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import 
org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.ClusterState; @@ -137,7 +137,7 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { calledTimes.incrementAndGet(); - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequest.requests().forEach(dwr -> assertEquals(ILM_HISTORY_DATA_STREAM, dwr.index())); @@ -177,11 +177,11 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { - if (action instanceof CreateIndexAction && request instanceof CreateIndexRequest) { + if (action == TransportCreateIndexAction.TYPE && request instanceof CreateIndexRequest) { return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); } calledTimes.incrementAndGet(); - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequest.requests().forEach(dwr -> { @@ -230,7 +230,7 @@ public void testMultipleFlushes() throws Exception { long numberOfDocs = 400_000; CountDownLatch latch = new CountDownLatch((int) numberOfDocs); client.setVerifier((action, request, listener) -> { - assertThat(action, instanceOf(BulkAction.class)); + assertSame(TransportBulkAction.TYPE, action); assertThat(request, instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; List> realRequests = 
bulkRequest.requests(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index f28f6eff25b04..57aba2bb80d68 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchRequest; @@ -255,7 +255,7 @@ private void persistModelStateDocs(String jobId, String snapshotId, int numDocs) bulkRequest.add(indexRequest); } - BulkResponse bulkResponse = client().execute(BulkAction.INSTANCE, bulkRequest).actionGet(); + BulkResponse bulkResponse = client().execute(TransportBulkAction.TYPE, bulkRequest).actionGet(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index aa85f78355fb3..2e16436736e89 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.PlainActionFuture; @@ -144,7 +144,7 @@ private void persistModelStateDocs(String jobId, String snapshotId, int numDocs) bulkRequest.add(indexRequest); } - BulkResponse bulkResponse = client().execute(BulkAction.INSTANCE, bulkRequest).actionGet(); + BulkResponse bulkResponse = client().execute(TransportBulkAction.TYPE, bulkRequest).actionGet(); assertFalse(bulkResponse.buildFailureMessage(), bulkResponse.hasFailures()); } } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java index 9daf353b11380..6572a6c286519 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; 
import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; @@ -242,7 +242,7 @@ public void testAliasesMovedFromOldToNew() throws Exception { ) .alias(new Alias(AnnotationIndex.READ_ALIAS_NAME).isHidden(true)) .alias(new Alias(AnnotationIndex.WRITE_ALIAS_NAME).isHidden(true)); - client().execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet(); + client().execute(TransportCreateIndexAction.TYPE, createIndexRequest).actionGet(); // Because the old annotations index name began with .ml, it will trigger the new annotations index to be created. // When this happens the read alias should be changed to cover both indices, and the write alias should be diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index ceae2a680feb0..4437a36318452 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -94,22 +94,28 @@ private void deleteFilter(String filterId, ActionListener BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); bulkRequestBuilder.add(deleteRequest); bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); 
- executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { - listener.onFailure( - new ResourceNotFoundException("Could not delete filter with ID [" + filterId + "] because it does not exist") - ); - } else { - listener.onResponse(AcknowledgedResponse.TRUE); + executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportBulkAction.TYPE, + bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse bulkResponse) { + if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { + listener.onFailure( + new ResourceNotFoundException("Could not delete filter with ID [" + filterId + "] because it does not exist") + ); + } else { + listener.onResponse(AcknowledgedResponse.TRUE); + } } - } - @Override - public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e)); + @Override + public void onFailure(Exception e) { + listener.onFailure(ExceptionsHelper.serverError("Could not delete filter with ID [" + filterId + "]", e)); + } } - }); + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 19f99a329d309..104c92fe5dfd7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -11,8 +11,8 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import 
org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -361,7 +361,7 @@ private void cancelResetTaskIfExists(String jobId, ActionListener liste executeAsyncWithOrigin( client, ML_ORIGIN, - CancelTasksAction.INSTANCE, + TransportCancelTasksAction.TYPE, cancelTasksRequest, ActionListener.wrap(cancelTasksResponse -> listener.onResponse(true), e -> { if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index d31488cda02bd..e94dae01f236b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -97,7 +97,7 @@ protected void doExecute( executeAsyncWithOrigin( client, ML_ORIGIN, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequestBuilder.request(), new ActionListener() { @Override diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java index 030e25ea7797a..d7071e4f973cc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportResetJobAction.java @@ -11,8 +11,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -157,7 +157,7 @@ private void waitExistingResetTaskToComplete( getTaskRequest.setTaskId(existingTaskId); getTaskRequest.setWaitForCompletion(true); getTaskRequest.setTimeout(request.timeout()); - executeAsyncWithOrigin(client, ML_ORIGIN, GetTaskAction.INSTANCE, getTaskRequest, ActionListener.wrap(getTaskResponse -> { + executeAsyncWithOrigin(client, ML_ORIGIN, TransportGetTaskAction.TYPE, getTaskRequest, ActionListener.wrap(getTaskResponse -> { TaskResult taskResult = getTaskResponse.getTask(); if (taskResult.isCompleted()) { listener.onResponse(AcknowledgedResponse.of(true)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index c01c1f46b3d13..9d5abbfc06f65 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -10,8 +10,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -215,7 +215,7 @@ private void isBlocked(Job job, RevertModelSnapshotAction.Request request, Actio executeAsyncWithOrigin( client, ML_ORIGIN, - GetTaskAction.INSTANCE, + TransportGetTaskAction.TYPE, getTaskRequest, ActionListener.wrap(r -> listener.onResponse(r.getTask().isCompleted() == false), e -> { if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java index 097be745996ab..a2bb420c1e705 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -10,9 +10,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import 
org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -114,16 +114,22 @@ private void indexModelSnapshot(Result modelSnapshot, Consumer() { - @Override - public void onResponse(BulkResponse indexResponse) { - handler.accept(true); - } + executeAsyncWithOrigin( + client, + ML_ORIGIN, + TransportBulkAction.TYPE, + bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse indexResponse) { + handler.accept(true); + } - @Override - public void onFailure(Exception e) { - errorHandler.accept(e); + @Override + public void onFailure(Exception e) { + errorHandler.accept(e); + } } - }); + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java index 8623f456b2035..280984feab4d4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java @@ -9,9 +9,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; @@ -115,7 +115,7 @@ public static void createDestinationIndex( 
analyticsConfig.getHeaders(), ClientHelper.ML_ORIGIN, client, - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, createIndexRequest, listener ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index b502e0d6db341..b9b38cb07fa39 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -16,10 +16,10 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.MultiSearchRequest; @@ -524,7 +524,7 @@ private void storeTrainedModelAndDefinition( wrappedListener.onResponse(true); }, wrappedListener::onFailure); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequest.request(), bulkResponseActionListener); + executeAsyncWithOrigin(client, ML_ORIGIN, TransportBulkAction.TYPE, bulkRequest.request(), bulkResponseActionListener); } /** diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f8f1e95fecd2e..1abb466a20f1a 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -21,10 +21,10 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.MultiSearchRequest; @@ -1940,7 +1940,7 @@ public void removeJobFromCalendars(String jobId, ActionListener listene bulkUpdate.add(updateRequest); } if (bulkUpdate.numberOfActions() > 0) { - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkUpdate.request(), updateCalendarsListener); + executeAsyncWithOrigin(client, ML_ORIGIN, TransportBulkAction.TYPE, bulkUpdate.request(), updateCalendarsListener); } else { listener.onResponse(true); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java index 5630f16e63351..82d19f9d72273 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterService.java @@ -11,10 +11,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import 
org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -218,7 +218,7 @@ public BulkResponse bulkIndexWithHeadersWithRetry( headers, ClientHelper.ML_ORIGIN, client, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, providedBulkRequest, listener ) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java index 9402a358dc305..ac71ab2b59865 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.support.PlainActionFuture; @@ -73,7 +73,7 @@ public void testCancelDownloadTaskCallsOnFailureWithErrorWhenCancellingFailsWith listener.onFailure(new Exception("cancel error")); return Void.TYPE; - 
}).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); + }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any()); var listener = new PlainActionFuture(); @@ -94,7 +94,7 @@ public void testCancelDownloadTaskCallsOnResponseNullWhenTheTaskNoLongerExistsWh listener.onFailure(new ResourceNotFoundException("task no longer there")); return Void.TYPE; - }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); + }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any()); var listener = new PlainActionFuture(); @@ -150,6 +150,6 @@ private static void mockCancelTasksResponse(Client client, ListTasksResponse res listener.onResponse(response); return Void.TYPE; - }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); + }).when(client).execute(same(TransportCancelTasksAction.TYPE), any(), any()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java index b19b27785a539..075c10ac6dc90 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/annotations/AnnotationPersisterTests.java @@ -9,10 +9,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; @@ -88,14 +88,14 @@ public void 
verifyNoMoreInteractionsWithMocks() { public void testPersistAnnotation_Create() throws IOException { doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); AnnotationPersister persister = new AnnotationPersister(resultsPersisterService); Annotation annotation = AnnotationTests.randomAnnotation(JOB_ID); Tuple result = persister.persistAnnotation(null, annotation); assertThat(result, is(equalTo(tuple(ANNOTATION_ID, annotation)))); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); List bulkRequests = bulkRequestCaptor.getAllValues(); assertThat(bulkRequests, hasSize(1)); @@ -111,14 +111,14 @@ public void testPersistAnnotation_Create() throws IOException { public void testPersistAnnotation_Update() throws IOException { doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); AnnotationPersister persister = new AnnotationPersister(resultsPersisterService); Annotation annotation = AnnotationTests.randomAnnotation(JOB_ID); Tuple result = persister.persistAnnotation(ANNOTATION_ID, annotation); assertThat(result, is(equalTo(tuple(ANNOTATION_ID, annotation)))); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); List bulkRequests = bulkRequestCaptor.getAllValues(); assertThat(bulkRequests, hasSize(1)); @@ -134,7 +134,7 @@ public void testPersistAnnotation_Update() throws IOException { public void testPersistMultipleAnnotationsWithBulk() { doAnswer(withResponse(new BulkResponse(new 
BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); AnnotationPersister persister = new AnnotationPersister(resultsPersisterService); persister.bulkPersisterBuilder(JOB_ID) @@ -145,7 +145,7 @@ public void testPersistMultipleAnnotationsWithBulk() { .persistAnnotation(AnnotationTests.randomAnnotation(JOB_ID)) .executeRequest(); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); List bulkRequests = bulkRequestCaptor.getAllValues(); assertThat(bulkRequests, hasSize(1)); @@ -154,7 +154,7 @@ public void testPersistMultipleAnnotationsWithBulk() { public void testPersistMultipleAnnotationsWithBulk_LowBulkLimit() { doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(ANNOTATION_ID) }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); AnnotationPersister persister = new AnnotationPersister(resultsPersisterService, 2); persister.bulkPersisterBuilder(JOB_ID) @@ -165,7 +165,7 @@ public void testPersistMultipleAnnotationsWithBulk_LowBulkLimit() { .persistAnnotation(AnnotationTests.randomAnnotation(JOB_ID)) .executeRequest(); - verify(client, times(3)).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client, times(3)).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); List bulkRequests = bulkRequestCaptor.getAllValues(); assertThat(bulkRequests, hasSize(3)); @@ -184,7 +184,7 @@ public void testPersistMultipleAnnotationsWithBulk_Failure() { .doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess("1"), bulkItemFailure("2") }, 0L))) // (2) .doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemFailure("2") }, 0L))) // (3) 
.when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); AnnotationPersister persister = new AnnotationPersister(resultsPersisterService); AnnotationPersister.Builder persisterBuilder = persister.bulkPersisterBuilder(JOB_ID) @@ -193,7 +193,7 @@ public void testPersistMultipleAnnotationsWithBulk_Failure() { ElasticsearchException e = expectThrows(ElasticsearchException.class, persisterBuilder::executeRequest); assertThat(e.getMessage(), containsString("Failed execution")); - verify(client, atLeastOnce()).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client, atLeastOnce()).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); List bulkRequests = bulkRequestCaptor.getAllValues(); assertThat(bulkRequests.get(0).numberOfActions(), equalTo(2)); // Original bulk request of size 2 diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java index e98c8a10b577f..1d52f278323dd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobTests.java @@ -9,10 +9,10 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.internal.Client; @@ -166,7 +166,7 @@ public void setup() throws Exception { 
when(client.execute(same(FlushJobAction.INSTANCE), flushJobRequests.capture())).thenReturn(flushJobFuture); doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { bulkItemSuccess(annotationDocId) }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); } public void testLookBackRunWithEndTime() throws Exception { @@ -334,7 +334,7 @@ public void testRealtimeRun() throws Exception { ); ArgumentCaptor bulkRequestArgumentCaptor = ArgumentCaptor.forClass(BulkRequest.class); - verify(client, atMost(2)).execute(eq(BulkAction.INSTANCE), bulkRequestArgumentCaptor.capture(), any()); + verify(client, atMost(2)).execute(eq(TransportBulkAction.TYPE), bulkRequestArgumentCaptor.capture(), any()); BulkRequest bulkRequest = bulkRequestArgumentCaptor.getValue(); assertThat(bulkRequest.requests(), hasSize(1)); IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(0); @@ -383,7 +383,7 @@ public void testRealtimeRun() throws Exception { ); ArgumentCaptor bulkRequestArgumentCaptor = ArgumentCaptor.forClass(BulkRequest.class); - verify(client, atMost(2)).execute(eq(BulkAction.INSTANCE), bulkRequestArgumentCaptor.capture(), any()); + verify(client, atMost(2)).execute(eq(TransportBulkAction.TYPE), bulkRequestArgumentCaptor.capture(), any()); BulkRequest bulkRequest = bulkRequestArgumentCaptor.getValue(); assertThat(bulkRequest.requests(), hasSize(1)); IndexRequest indexRequest = (IndexRequest) bulkRequest.requests().get(0); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java index 998edd6044bab..2f3ccaa313b0d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java @@ -8,8 +8,8 @@ import 
org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -113,7 +113,7 @@ private Map testCreateDestinationIndex(DataFrameAnalysis analysi ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); doAnswer(callListenerOnResponse(null)).when(client) - .execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + .execute(eq(TransportCreateIndexAction.TYPE), createIndexRequestCaptor.capture(), any()); Map analysisSettings1 = Map.ofEntries( Map.entry("index.analysis.filter.bigram_joiner.max_shingle_size", "2"), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java index 94e0c533ef5fc..3daeed561e88b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProviderTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import 
org.elasticsearch.action.support.PlainActionFuture; @@ -548,7 +548,7 @@ private void assertThatIndexRequestHasOperation(Client client, DocWriteRequest.O private void assertThatBulkIndexRequestHasOperation(Client client, DocWriteRequest.OpType operation) { var bulkIndexRequestArg = ArgumentCaptor.forClass(BulkRequest.class); - verify(client).execute(eq(BulkAction.INSTANCE), bulkIndexRequestArg.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkIndexRequestArg.capture(), any()); var requests = bulkIndexRequestArg.getValue().requests(); assertThat(bulkIndexRequestArg.getValue().requests().size(), Matchers.greaterThan(0)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index 654ce7bf965bd..0a7fc75115d2a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -85,7 +85,7 @@ public class JobResultsPersisterTests extends ESTestCase { public void setUpTests() { bulkRequestCaptor = ArgumentCaptor.forClass(BulkRequest.class); client = mock(Client.class); - doAnswer(withResponse(mock(BulkResponse.class))).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + 
doAnswer(withResponse(mock(BulkResponse.class))).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); OriginSettingClient originSettingClient = MockOriginSettingClient.mockOriginSettingClient(client, ClientHelper.ML_ORIGIN); persister = new JobResultsPersister(originSettingClient, buildResultsPersisterService(originSettingClient)); } @@ -111,7 +111,7 @@ public void testPersistBucket_OneRecord() { persister.bulkPersisterBuilder(JOB_ID).persistBucket(bucket).executeRequest(); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); BulkRequest bulkRequest = bulkRequestCaptor.getValue(); assertEquals(2, bulkRequest.numberOfActions()); @@ -162,7 +162,7 @@ public void testPersistRecords() { persister.bulkPersisterBuilder(JOB_ID).persistRecords(records).executeRequest(); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); BulkRequest bulkRequest = bulkRequestCaptor.getValue(); assertEquals(1, bulkRequest.numberOfActions()); @@ -197,7 +197,7 @@ public void testPersistInfluencers() { persister.bulkPersisterBuilder(JOB_ID).persistInfluencers(influencers).executeRequest(); - verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); BulkRequest bulkRequest = bulkRequestCaptor.getValue(); assertEquals(1, bulkRequest.numberOfActions()); @@ -233,7 +233,7 @@ public void testBulkRequestExecutesWhenReachMaxDocs() { InOrder inOrder = inOrder(client); inOrder.verify(client).settings(); inOrder.verify(client, times(3)).threadPool(); - inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), 
bulkRequestCaptor.capture(), any()); verifyNoMoreInteractions(client); } @@ -252,7 +252,7 @@ public void testPersistTimingStats() { InOrder inOrder = inOrder(client); inOrder.verify(client).settings(); inOrder.verify(client, times(3)).threadPool(); - inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); verifyNoMoreInteractions(client); BulkRequest bulkRequest = bulkRequestCaptor.getValue(); @@ -302,7 +302,7 @@ public void testPersistDatafeedTimingStats() { InOrder inOrder = inOrder(client); inOrder.verify(client).settings(); inOrder.verify(client, times(3)).threadPool(); - inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); verifyNoMoreInteractions(client); // Refresh policy is set on the bulk request, not the individual index requests @@ -342,7 +342,7 @@ private void testPersistQuantilesSync(SearchHits searchHits, String expectedInde InOrder inOrder = inOrder(client); inOrder.verify(client).execute(eq(TransportSearchAction.TYPE), any(), any()); - inOrder.verify(client).execute(eq(BulkAction.INSTANCE), bulkRequestCaptor.capture(), any()); + inOrder.verify(client).execute(eq(TransportBulkAction.TYPE), bulkRequestCaptor.capture(), any()); inOrder.verifyNoMoreInteractions(); BulkRequest bulkRequest = bulkRequestCaptor.getValue(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index f8ffed0864372..7aaeabac3af8b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -10,10 +10,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; @@ -263,7 +263,7 @@ public void testBulkRequestChangeOnFailures() { doAnswerWithResponses( new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L), new BulkResponse(new BulkItemResponse[0], 0L) - ).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(INDEX_REQUEST_FAILURE); @@ -274,7 +274,7 @@ public void testBulkRequestChangeOnFailures() { resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set); ArgumentCaptor captor = ArgumentCaptor.forClass(BulkRequest.class); - verify(client, times(2)).execute(eq(BulkAction.INSTANCE), captor.capture(), any()); + verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), captor.capture(), any()); List requests = captor.getAllValues(); @@ -294,7 +294,7 @@ public void testBulkRequestChangeOnIrrecoverableFailures() { doAnswerWithResponses( new BulkResponse(new BulkItemResponse[] { irrecoverable, BULK_ITEM_RESPONSE_SUCCESS }, 0L), new BulkResponse(new BulkItemResponse[0], 0L) - ).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); 
BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(INDEX_REQUEST_FAILURE); @@ -305,7 +305,7 @@ public void testBulkRequestChangeOnIrrecoverableFailures() { () -> resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, (s) -> {}) ); - verify(client).execute(eq(BulkAction.INSTANCE), any(), any()); + verify(client).execute(eq(TransportBulkAction.TYPE), any(), any()); assertThat(ex.getMessage(), containsString("experienced failure that cannot be automatically retried.")); } @@ -313,7 +313,7 @@ public void testBulkRequestDoesNotRetryWhenSupplierIsFalse() { doAnswerWithResponses( new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L), new BulkResponse(new BulkItemResponse[0], 0L) - ).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(INDEX_REQUEST_FAILURE); @@ -325,7 +325,7 @@ public void testBulkRequestDoesNotRetryWhenSupplierIsFalse() { ElasticsearchException.class, () -> resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> false, lastMessage::set) ); - verify(client, times(1)).execute(eq(BulkAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(TransportBulkAction.TYPE), any(), any()); assertThat(lastMessage.get(), is(nullValue())); } @@ -335,7 +335,7 @@ public void testBulkRequestRetriesConfiguredAttemptNumber() { resultsPersisterService.setMaxFailureRetries(maxFailureRetries); doAnswer(withResponse(new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE }, 0L))).when(client) - .execute(eq(BulkAction.INSTANCE), any(), any()); + .execute(eq(TransportBulkAction.TYPE), any(), any()); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(INDEX_REQUEST_FAILURE); @@ -346,7 +346,7 @@ public void testBulkRequestRetriesConfiguredAttemptNumber() { ElasticsearchException.class, () -> 
resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set) ); - verify(client, times(maxFailureRetries + 1)).execute(eq(BulkAction.INSTANCE), any(), any()); + verify(client, times(maxFailureRetries + 1)).execute(eq(TransportBulkAction.TYPE), any(), any()); assertThat(lastMessage.get(), containsString("failed to index after [10] attempts. Will attempt again")); } @@ -355,7 +355,7 @@ public void testBulkRequestRetriesMsgHandlerIsCalled() { doAnswerWithResponses( new BulkResponse(new BulkItemResponse[] { BULK_ITEM_RESPONSE_FAILURE, BULK_ITEM_RESPONSE_SUCCESS }, 0L), new BulkResponse(new BulkItemResponse[0], 0L) - ).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + ).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(INDEX_REQUEST_FAILURE); @@ -366,7 +366,7 @@ public void testBulkRequestRetriesMsgHandlerIsCalled() { resultsPersisterService.bulkIndexWithRetry(bulkRequest, JOB_ID, () -> true, lastMessage::set); ArgumentCaptor captor = ArgumentCaptor.forClass(BulkRequest.class); - verify(client, times(2)).execute(eq(BulkAction.INSTANCE), captor.capture(), any()); + verify(client, times(2)).execute(eq(TransportBulkAction.TYPE), captor.capture(), any()); List requests = captor.getAllValues(); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java index 4b7819693aedb..ae1aa7072510d 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java @@ -11,9 +11,9 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import 
org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -397,9 +397,8 @@ private ActionResponse verifyIndexInstalled( ActionRequest request, ActionListener listener ) { - if (action instanceof CreateIndexAction) { + if (action == TransportCreateIndexAction.TYPE) { calledTimes.incrementAndGet(); - assertThat(action, instanceOf(CreateIndexAction.class)); assertThat(request, instanceOf(CreateIndexRequest.class)); assertNotNull(listener); return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); @@ -416,9 +415,8 @@ private ActionResponse verifyIndexUpgraded( ActionRequest request, ActionListener listener ) { - if (action instanceof CreateIndexAction) { + if (action == TransportCreateIndexAction.TYPE) { indicesCreated.incrementAndGet(); - assertThat(action, instanceOf(CreateIndexAction.class)); assertThat(request, instanceOf(CreateIndexRequest.class)); assertNotNull(listener); return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index a276971762c81..9c3c34e2d63bd 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -13,8 +13,8 @@ import 
org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -165,7 +165,7 @@ static void createIndex( } client.execute( - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, request, ActionListener.wrap(createIndexResponse -> startPersistentTask(job, listener, persistentTasksService), e -> { if (e instanceof ResourceAlreadyExistsException) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index b2e1ed42440a2..f4c420db47ac3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -11,9 +11,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.broadcast.BroadcastResponse; @@ -142,7 +142,7 @@ protected void doNextBulk(BulkRequest request, 
ActionListener next job.getHeaders(), ClientHelper.ROLLUP_ORIGIN, client, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, request, nextPhase ); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index b1455c4738623..ee8b4c79d1893 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -9,8 +9,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -67,12 +67,12 @@ public void testCreateIndexException() { doAnswer(invocation -> { requestCaptor.getValue().onFailure(new RuntimeException("something bad")); return null; - }).when(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), requestCaptor.capture()); + }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(CreateIndexRequest.class), requestCaptor.capture()); TransportPutRollupJobAction.createIndex(job, testListener, mock(PersistentTasksService.class), client, logger); // ResourceAlreadyExists should trigger a GetMapping next - verify(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), any()); + verify(client).execute(eq(TransportCreateIndexAction.TYPE), 
any(CreateIndexRequest.class), any()); } @SuppressWarnings({ "unchecked", "rawtypes" }) @@ -90,7 +90,7 @@ public void testIndexAlreadyExists() { doAnswer(invocation -> { requestCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex())); return null; - }).when(client).execute(eq(CreateIndexAction.INSTANCE), any(CreateIndexRequest.class), requestCaptor.capture()); + }).when(client).execute(eq(TransportCreateIndexAction.TYPE), any(CreateIndexRequest.class), requestCaptor.capture()); ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { @@ -130,7 +130,7 @@ public void testIndexMetadata() throws InterruptedException { listenerCaptor.getValue().onFailure(new ResourceAlreadyExistsException(job.getConfig().getRollupIndex())); latch.countDown(); return null; - }).when(client).execute(eq(CreateIndexAction.INSTANCE), requestCaptor.capture(), listenerCaptor.capture()); + }).when(client).execute(eq(TransportCreateIndexAction.TYPE), requestCaptor.capture(), listenerCaptor.capture()); ArgumentCaptor requestCaptor2 = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index 1713be9feac65..a40f21c0de08d 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -11,9 +11,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; -import 
org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -636,7 +636,7 @@ private void handleSearchResponse(SearchResponse searchResponse, RefCounted refs if (bulkRequest.numberOfActions() > 0) { refs.mustIncRef(); clientWithOrigin.execute( - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, ActionListener.releaseAfter(listeners.acquire(bulkResponse -> { for (BulkItemResponse itemResponse : bulkResponse.getItems()) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java index b2417d8f18fa5..fe9c1f37e7d49 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/apikey/ApiKeySingleNodeTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.TransportGetAction; @@ -211,7 +211,7 @@ public void 
testCreatingApiKeyWithNoAccess() { ElasticsearchSecurityException.class, () -> client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue)) .execute( - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, new CreateIndexRequest(randomFrom(randomAlphaOfLengthBetween(3, 8), SECURITY_MAIN_ALIAS)) ) .actionGet() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 7a0cb604f4ce9..8324763c57bd2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; @@ -700,7 +700,7 @@ public void testCreateIndexAliasesOnlyPermission() { basicAuthHeaderValue("aliases_only", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING) ) ).admin().indices().prepareCreate("test_1")::get, - CreateIndexAction.NAME, + TransportCreateIndexAction.TYPE.name(), "aliases_only" ); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java index 
159228dd1c1b2..a3a4f1b074232 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/WriteActionsTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.TransportUpdateAction; @@ -55,14 +55,14 @@ public void testIndex() { assertThrowsAuthorizationExceptionDefaultUsers( prepareIndex("index1").setId("id").setSource("field", "value")::get, - BulkAction.NAME + "[s]" + TransportBulkAction.NAME + "[s]" ); prepareIndex("test4").setId("id").setSource("field", "value").get(); // the missing index gets automatically created (user has permissions for that), but indexing fails due to missing authorization assertThrowsAuthorizationExceptionDefaultUsers( prepareIndex("missing").setId("id").setSource("field", "value")::get, - BulkAction.NAME + "[s]" + TransportBulkAction.NAME + "[s]" ); ensureGreen(); } @@ -72,7 +72,7 @@ public void testDelete() { prepareIndex("test1").setId("id").setSource("field", "value").get(); assertEquals(RestStatus.OK, client().prepareDelete("test1", "id").get().status()); - assertThrowsAuthorizationExceptionDefaultUsers(client().prepareDelete("index1", "id")::get, BulkAction.NAME + "[s]"); + assertThrowsAuthorizationExceptionDefaultUsers(client().prepareDelete("index1", "id")::get, TransportBulkAction.NAME + "[s]"); expectThrows(IndexNotFoundException.class, () -> client().prepareDelete("test4", "id").get()); ensureGreen(); @@ -130,7 +130,7 @@ public void testBulk() { 
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[1].getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); assertThat(bulkResponse.getItems()[1].getFailure().getIndex(), equalTo("index1")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[1].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[1].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[1].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -145,7 +145,7 @@ public void testBulk() { // the missing index gets automatically created (user has permissions for that), but indexing fails due to missing authorization assertThat(bulkResponse.getItems()[3].getFailure().getIndex(), equalTo("missing")); assertThat(bulkResponse.getItems()[3].getFailure().getCause(), instanceOf(ElasticsearchSecurityException.class)); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[3].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[3].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[3].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -158,7 +158,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[5].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[5].getOpType(), equalTo(DocWriteRequest.OpType.DELETE)); assertThat(bulkResponse.getItems()[5].getFailure().getIndex(), equalTo("index1")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[5].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[5].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( 
bulkResponse.getItems()[5].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -171,7 +171,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[7].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[7].getOpType(), equalTo(DocWriteRequest.OpType.DELETE)); assertThat(bulkResponse.getItems()[7].getFailure().getIndex(), equalTo("missing")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[7].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[7].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -188,7 +188,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[10].isFailed(), equalTo(true)); assertThat(bulkResponse.getItems()[10].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(bulkResponse.getItems()[10].getFailure().getIndex(), equalTo("index1")); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), BulkAction.NAME + "[s]"); + assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[10].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[10].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") @@ -203,7 +203,7 @@ public void testBulk() { assertThat(bulkResponse.getItems()[12].getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(bulkResponse.getItems()[12].getFailure().getIndex(), equalTo("missing")); assertThat(bulkResponse.getItems()[12].getFailure().getCause(), instanceOf(ElasticsearchSecurityException.class)); - assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), BulkAction.NAME + "[s]"); + 
assertAuthorizationExceptionDefaultUsers(bulkResponse.getItems()[12].getFailure().getCause(), TransportBulkAction.NAME + "[s]"); assertThat( bulkResponse.getItems()[12].getFailure().getCause().getMessage(), containsString("[indices:data/write/bulk[s]] is unauthorized") diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 9667fd336112f..ec0e54e96f1af 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -445,7 +444,7 @@ private void createApiKeyAndIndexIt( () -> executeAsyncWithOrigin( client, SECURITY_ORIGIN, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> { assert request.getId().equals(indexResponse.getId()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java index c2d8be1c26629..e59bcff2c9240 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStore.java @@ -14,7 +14,6 @@ import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest.OpType; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.DeleteRequest; @@ -155,7 +154,7 @@ void createToken( executeAsyncWithOrigin( client, SECURITY_ORIGIN, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(response -> { assert DocWriteResponse.Result.CREATED == response.getResult() diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 629a1a476995f..194440722545a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -19,7 +19,7 @@ import java.util.function.Consumer; import java.util.function.Predicate; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.elasticsearch.action.bulk.TransportBulkAction.LAZY_ROLLOVER_ORIGIN; import static org.elasticsearch.action.support.replication.PostWriteRefresh.POST_WRITE_REFRESH_ORIGIN; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 39d14d6685553..320dd4c6f8e09 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -18,9 +18,9 @@ import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.SimulateBulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportMultiGetAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -258,7 +258,7 @@ static boolean checkSameUserPermissions(String action, TransportRequest request, private static boolean shouldAuthorizeIndexActionNameOnly(String action, TransportRequest request) { switch (action) { - case BulkAction.NAME: + case TransportBulkAction.NAME: case SimulateBulkAction.NAME: case TransportIndexAction.NAME: case TransportDeleteAction.NAME: diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index 9b06fe23b903a..55be659512c52 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BackoffPolicy; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.TransportBulkAction; import 
org.elasticsearch.action.get.GetRequest; @@ -748,7 +747,7 @@ void createNewProfile(Subject subject, String uid, ActionListener liste () -> executeAsyncWithOrigin( client, getActionOrigin(), - BulkAction.INSTANCE, + TransportBulkAction.TYPE, bulkRequest, TransportBulkAction.unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> { assert docId.equals(indexResponse.getId()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 57ea876935d39..540a0758db43a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -16,10 +16,10 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -174,7 +174,7 @@ protected void indexRequests.add(indexRequest); final IndexResponse response = new IndexResponse(new ShardId("test", "test", 0), indexRequest.id(), 1, 1, 1, true); listener.onResponse((Response) response); - } else if (BulkAction.NAME.equals(action.name())) { + } else if (TransportBulkAction.NAME.equals(action.name())) { assertThat(request, 
instanceOf(BulkRequest.class)); BulkRequest bulkRequest = (BulkRequest) request; bulkRequests.add(bulkRequest); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 1754ffe3e9812..d2ca214723416 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -18,11 +18,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -261,7 +261,7 @@ public void testCreateApiKeyUsesBulkIndexAction() throws Exception { assertThat(indexRequest.opType(), is(DocWriteRequest.OpType.CREATE)); bulkActionInvoked.set(true); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), any()); service.createApiKey(authentication, createApiKeyRequest, Set.of(), new PlainActionFuture<>()); assertBusy(() -> assertTrue(bulkActionInvoked.get())); } @@ -627,7 +627,7 @@ public void testCreateApiKeyWillCacheOnCreation() { ) ); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), 
any(BulkRequest.class), any()); final Cache> apiKeyAuthCache = service.getApiKeyAuthCache(); assertNull(apiKeyAuthCache.get(createApiKeyRequest.getId())); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 8ec06c7273bbd..756d53285a8f6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportAutoPutMappingAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; @@ -19,7 +19,7 @@ import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.get.TransportMultiGetAction; @@ -134,9 +134,9 @@ public void testElasticFleetServerPrivileges() { 
assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(false)); @@ -149,9 +149,9 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(profilingIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(profilingIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(profilingIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(profilingIndex), is(true)); + 
assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(profilingIndex), is(true)); @@ -163,9 +163,9 @@ public void testElasticFleetServerPrivileges() { assertThat(role.indices().allowedIndicesMatcher(TransportAutoPutMappingAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(index), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(index), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(index), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(index), is(true)); @@ -187,9 +187,9 @@ public void testElasticFleetServerPrivileges() { ).forEach(index -> { final IndexAbstraction dotFleetIndex = mockIndexAbstraction(index); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetIndex), is(true)); - 
assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(dotFleetIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(dotFleetIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetIndex), is(true)); @@ -202,9 +202,9 @@ public void testElasticFleetServerPrivileges() { final IndexAbstraction dotFleetSecretsIndex = mockIndexAbstraction(".fleet-secrets" + randomAlphaOfLengthBetween(1, 20)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(dotFleetSecretsIndex), is(false)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(dotFleetSecretsIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(dotFleetSecretsIndex), is(false)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(dotFleetSecretsIndex), is(false)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(dotFleetSecretsIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(dotFleetSecretsIndex), 
is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(dotFleetSecretsIndex), is(true)); @@ -221,9 +221,9 @@ public void testElasticFleetServerPrivileges() { final IndexAbstraction apmSampledTracesIndex = mockIndexAbstraction("traces-apm.sampled-" + randomAlphaOfLengthBetween(1, 20)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(apmSampledTracesIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(apmSampledTracesIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(apmSampledTracesIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(apmSampledTracesIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(apmSampledTracesIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(apmSampledTracesIndex), is(true)); @@ -371,11 +371,11 @@ public void testElasticEnterpriseSearchServerAccount() { ).forEach(index -> { final IndexAbstraction enterpriseSearchIndex = mockIndexAbstraction(index); assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(enterpriseSearchIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); 
assertThat(role.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(enterpriseSearchIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(TransportBulkAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportMultiGetAction.NAME).test(enterpriseSearchIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(enterpriseSearchIndex), is(true)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 4330dc3171047..861b21403b2b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -20,8 +20,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import 
org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; @@ -42,12 +42,12 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.BulkShardResponse; import org.elasticsearch.action.bulk.MappingUpdatePerformer; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -1430,14 +1430,14 @@ public void testCreateIndexWithAliasWithoutPermissions() { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); assertThrowsAuthorizationException( - () -> authorize(authentication, CreateIndexAction.NAME, request), + () -> authorize(authentication, TransportCreateIndexAction.TYPE.name(), request), TransportIndicesAliasesAction.NAME, "test user" ); verify(auditTrail).accessGranted( eq(requestId), eq(authentication), - eq(CreateIndexAction.NAME), + eq(TransportCreateIndexAction.TYPE.name()), eq(request), authzInfoRoles(new String[] { role.getName() }) ); @@ -1467,12 +1467,12 @@ public void testCreateIndexWithAlias() { roleMap.put("a_all", role); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); - authorize(authentication, CreateIndexAction.NAME, request); + authorize(authentication, TransportCreateIndexAction.TYPE.name(), request); verify(auditTrail).accessGranted( 
eq(requestId), eq(authentication), - eq(CreateIndexAction.NAME), + eq(TransportCreateIndexAction.TYPE.name()), eq(request), authzInfoRoles(new String[] { role.getName() }) ); @@ -1543,7 +1543,7 @@ public void testDenialErrorMessagesForBulkIngest() throws Exception { RoleDescriptor role = new RoleDescriptor( "some_indices_" + randomAlphaOfLengthBetween(3, 6), null, - new IndicesPrivileges[] { IndicesPrivileges.builder().indices(index).privileges(BulkAction.NAME).build() }, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices(index).privileges(TransportBulkAction.NAME).build() }, null ); User user = new User(randomAlphaOfLengthBetween(6, 8), role.getName()); @@ -2053,7 +2053,10 @@ public void testGrantAllRestrictedUserCannotExecuteOperationAgainstSecurityIndic List> requests = new ArrayList<>(); requests.add( - new Tuple<>(BulkAction.NAME + "[s]", new DeleteRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), "id")) + new Tuple<>( + TransportBulkAction.NAME + "[s]", + new DeleteRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), "id") + ) ); requests.add( new Tuple<>( @@ -2062,7 +2065,7 @@ public void testGrantAllRestrictedUserCannotExecuteOperationAgainstSecurityIndic ) ); requests.add( - new Tuple<>(BulkAction.NAME + "[s]", new IndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) + new Tuple<>(TransportBulkAction.NAME + "[s]", new IndexRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7))) ); requests.add( new Tuple<>( @@ -2327,19 +2330,19 @@ public void testSuperusersCannotExecuteWriteOperationAgainstSecurityIndex() { List> requests = new ArrayList<>(); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", createBulkShardRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), DeleteRequest::new) ) ); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", 
createBulkShardRequest(randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), UpdateRequest::new) ) ); requests.add( new Tuple<>( - BulkAction.NAME + "[s]", + TransportBulkAction.NAME + "[s]", createBulkShardRequest( randomFrom(SECURITY_MAIN_ALIAS, INTERNAL_SECURITY_MAIN_INDEX_7), (index, id) -> new IndexRequest(index).id(id) @@ -2500,7 +2503,7 @@ public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() { request = mockRequest; } case 3 -> { - action = BulkAction.NAME + "[s]"; + action = TransportBulkAction.NAME + "[s]"; request = createBulkShardRequest("index", (index, id) -> new IndexRequest(index).id(id)); } case 4 -> { @@ -2540,7 +2543,7 @@ public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() { } public void testAuthorizationOfSingleActionMultipleIndicesBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items; final DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values()); // the "good role" authorizes all the bulk items @@ -2788,7 +2791,7 @@ public void testAuthorizationOfSingleActionMultipleIndicesBulkItems() { } public void testAuthorizationOfMultipleActionsSingleIndexBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final AtomicInteger idCounter = new AtomicInteger(); final Set actionTypes = new HashSet<>(); final Set deleteItems = new HashSet<>(); @@ -2929,7 +2932,7 @@ public void testAuthorizationOfMultipleActionsSingleIndexBulkItems() { } public void testAuthorizationOfIndividualIndexAndDeleteBulkItems() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items = { new BulkItemRequest(1, new DeleteRequest("concrete-index", "c1")), new BulkItemRequest(2, new IndexRequest("concrete-index").id("c2")), @@ -3019,7 +3022,7 @@ public void 
testAuthorizationOfIndividualIndexAndDeleteBulkItems() { } public void testAuthorizationOfIndividualBulkItemsWithDateMath() { - final String action = BulkAction.NAME + "[s]"; + final String action = TransportBulkAction.NAME + "[s]"; final BulkItemRequest[] items = { new BulkItemRequest(1, new IndexRequest("").id("dy1")), new BulkItemRequest(2, new DeleteRequest("", "dy2")), // resolves to same as above @@ -3089,7 +3092,7 @@ private static Tuple randomCompositeRequest() { case 0 -> Tuple.tuple(TransportMultiGetAction.NAME, new MultiGetRequest().add("index", "id")); case 1 -> Tuple.tuple(TransportMultiSearchAction.TYPE.name(), new MultiSearchRequest().add(new SearchRequest())); case 2 -> Tuple.tuple(MultiTermVectorsAction.NAME, new MultiTermVectorsRequest().add("index", "id")); - case 3 -> Tuple.tuple(BulkAction.NAME, new BulkRequest().add(new DeleteRequest("index", "id"))); + case 3 -> Tuple.tuple(TransportBulkAction.NAME, new BulkRequest().add(new DeleteRequest("index", "id"))); case 4 -> Tuple.tuple("indices:data/read/mpercolate", new MockCompositeIndicesRequest()); case 5 -> Tuple.tuple("indices:data/read/msearch/template", new MockCompositeIndicesRequest()); case 6 -> Tuple.tuple("indices:data/read/search/template", new MockCompositeIndicesRequest()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java index 54ef4a19e182d..a2b6bf0c33dc8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationUtilsTests.java @@ -30,7 +30,7 @@ import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; -import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static 
org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction.TASKS_ORIGIN; import static org.hamcrest.Matchers.is; /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 4ad7c61d45d63..39eda23e35eec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.index.TransportIndexAction; @@ -2132,7 +2132,8 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { is(false == emptyRemoteRole) ); assertThat( - role.authorize(CreateIndexAction.NAME, Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache).isGranted(), + role.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache) + .isGranted(), is(false) ); assertThat( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index 0760eeafc2f77..11b8598768667 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; @@ -623,7 +623,7 @@ public void testSecurityProfileOrigin() { final ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(expectedException); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(BulkRequest.class), anyActionListener()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(BulkRequest.class), anyActionListener()); final PlainActionFuture future1 = new PlainActionFuture<>(); profileService.activateProfile(AuthenticationTestHelper.builder().realm().build(), future1); diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java index 082b097df684b..750fdd40c12d6 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotHistoryStoreTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.slm.history; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import 
org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -137,7 +137,7 @@ public void testPut() throws Exception { AtomicInteger calledTimes = new AtomicInteger(0); client.setVerifier((action, request, listener) -> { - if (action instanceof CreateIndexAction && request instanceof CreateIndexRequest) { + if (action == TransportCreateIndexAction.TYPE && request instanceof CreateIndexRequest) { return new CreateIndexResponse(true, true, ((CreateIndexRequest) request).index()); } calledTimes.incrementAndGet(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java index 43c72642c84a4..a3dd8cdf5bcbb 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/SpatialDiskUsageIT.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.spatial; import org.apache.lucene.tests.geo.GeoTestUtil; -import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageAction; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageRequest; import org.elasticsearch.action.admin.indices.diskusage.AnalyzeIndexDiskUsageResponse; +import org.elasticsearch.action.admin.indices.diskusage.TransportAnalyzeIndexDiskUsageAction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -89,7 +89,7 @@ private void doTestSpatialField(String type) throws Exception { prepareIndex(index).setId("id-" + i).setSource(doc).get(); } AnalyzeIndexDiskUsageResponse resp = 
client().execute( - AnalyzeIndexDiskUsageAction.INSTANCE, + TransportAnalyzeIndexDiskUsageAction.TYPE, new AnalyzeIndexDiskUsageRequest(new String[] { index }, AnalyzeIndexDiskUsageRequest.DEFAULT_INDICES_OPTIONS, true) ).actionGet(); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java index e3d9fa3aff671..79b9458be4ed2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/TransformIndex.java @@ -14,8 +14,8 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -191,7 +191,7 @@ static void createDestinationIndex( config.getHeaders(), TRANSFORM_ORIGIN, client, - CreateIndexAction.INSTANCE, + TransportCreateIndexAction.TYPE, request, ActionListener.wrap(createIndexResponse -> { listener.onResponse(true); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index c68c73fd71d9e..66c618bc07c46 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -14,10 +14,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.SearchRequest; @@ -170,7 +170,7 @@ protected void doNextBulk(BulkRequest request, ActionListener next transformConfig.getHeaders(), ClientHelper.TRANSFORM_ORIGIN, client, - BulkAction.INSTANCE, + TransportBulkAction.TYPE, request, ActionListener.wrap(bulkResponse -> handleBulkResponse(bulkResponse, nextPhase), nextPhase::onFailure) ); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java index 87b65978f667e..ce43a353ff414 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/TransformIndexTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; -import 
org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -151,7 +151,7 @@ public void testCreateDestinationIndex() throws IOException { ); ArgumentCaptor createIndexRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class); - verify(client).execute(eq(CreateIndexAction.INSTANCE), createIndexRequestCaptor.capture(), any()); + verify(client).execute(eq(TransportCreateIndexAction.TYPE), createIndexRequestCaptor.capture(), any()); verify(client, atLeastOnce()).threadPool(); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index b75ac51c3510f..776f649300aa4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -11,11 +11,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor2; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollResponse; import 
org.elasticsearch.action.search.SearchResponse; @@ -472,7 +472,7 @@ public void testPutTriggeredWatches() throws Exception { listener.onResponse(new BulkResponse(bulkItemResponse, 123)); return null; - }).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + }).when(client).execute(eq(TransportBulkAction.TYPE), any(), any()); BulkResponse response = triggeredWatchStore.putAll(triggeredWatches); assertThat(response.hasFailures(), is(false)); From 64847b39d2ea9c3b3a7157c84a5e8dc9b23ce219 Mon Sep 17 00:00:00 2001 From: David Turner Date: Sun, 31 Mar 2024 21:51:30 +0100 Subject: [PATCH 38/69] AwaitsFix for #104081 --- .../xpack/ml/integration/MlDistributedFailureIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 33fd7c108863b..a2b00974d4038 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -144,6 +144,7 @@ public void testLoseDedicatedMasterNode() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104081") public void testFullClusterRestart() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); ensureStableCluster(); From b643abab3b9749cf32ad3561de9b2233abaa978a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 1 Apr 2024 01:41:52 +0200 Subject: [PATCH 39/69] Harden usage of XML document builder in build-conventions (#106874) While `LicenseHeadersTask` only uses `DocumentBuilderFactory` internally to parse `rat.xml` files (which are the output of running Apache RAT on Elasticsearch codebase files), it is a good practice to disable XXE features 
even if it's part of checks that are run on developers machines. --- .../precommit/LicenseHeadersTask.java | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java index 76b117f8f5308..e49feb4289586 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java @@ -25,6 +25,7 @@ import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.CacheableTask; import org.gradle.api.tasks.IgnoreEmptyDirectories; @@ -39,13 +40,12 @@ import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; -import org.gradle.api.model.ObjectFactory; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; + import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.io.Serializable; import java.io.Writer; import java.nio.file.Files; import java.util.ArrayList; @@ -53,7 +53,9 @@ import java.util.List; import java.util.stream.Collectors; import javax.inject.Inject; -import java.io.Serializable; +import javax.xml.XMLConstants; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; /** * Checks files for license headers.. 
@@ -232,7 +234,7 @@ private ClaimStatistic toXmlReportFile(ReportConfiguration config, Writer writer private static List unapprovedFiles(File xmlReportFile) { try { - NodeList resourcesNodes = DocumentBuilderFactory.newInstance() + NodeList resourcesNodes = createXmlDocumentBuilderFactory() .newDocumentBuilder() .parse(xmlReportFile) .getElementsByTagName("resource"); @@ -249,6 +251,21 @@ private static List unapprovedFiles(File xmlReportFile) { } } + private static DocumentBuilderFactory createXmlDocumentBuilderFactory() throws ParserConfigurationException { + final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + dbf.setXIncludeAware(false); + dbf.setIgnoringComments(true); + dbf.setExpandEntityReferences(false); + dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); + dbf.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, ""); + dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); + dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + dbf.setFeature("http://xml.org/sax/features/external-general-entities", false); + dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false); + return dbf; + } + private static List elementList(NodeList resourcesNodes) { List nodeList = new ArrayList<>(resourcesNodes.getLength()); for (int idx = 0; idx < resourcesNodes.getLength(); idx++) { From de6ece610e6abdfb288803a8ac7bcbc2e976fbc0 Mon Sep 17 00:00:00 2001 From: William Brafford Date: Sun, 31 Mar 2024 20:57:25 -0400 Subject: [PATCH 40/69] Check that operations on the Kibana system index use system index thread pools (#106915) * Add test to verify Kibana's access to system thread pools * Block all relevant threads on any number of nodes * Fill threadpool queues before testing requests that should be blocked --- .../kibana/KibanaThreadPoolTests.java | 52 +++++++++ .../indices/SystemIndexThreadPoolTests.java | 
108 ++++++++++++++++++ 2 files changed, 160 insertions(+) create mode 100644 modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java create mode 100644 test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java new file mode 100644 index 0000000000000..0974fd6d36b18 --- /dev/null +++ b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.kibana; + +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.indices.SystemIndexThreadPoolTests; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; + +public class KibanaThreadPoolTests extends SystemIndexThreadPoolTests { + + @Override + protected Collection> nodePlugins() { + return Set.of(KibanaPlugin.class); + } + + public void testKibanaThreadPool() { + runWithBlockedThreadPools(() -> { + // index documents + String idToDelete = client().prepareIndex(".kibana").setSource(Map.of("foo", "delete me!")).get().getId(); + String idToUpdate = 
client().prepareIndex(".kibana").setSource(Map.of("foo", "update me!")).get().getId(); + + // bulk index, delete, and update + Client bulkClient = client(); + BulkResponse response = bulkClient.prepareBulk(".kibana") + .add(bulkClient.prepareIndex(".kibana").setSource(Map.of("foo", "search me!"))) + .add(bulkClient.prepareDelete(".kibana", idToDelete)) + .add(bulkClient.prepareUpdate().setId(idToUpdate).setDoc(Map.of("foo", "I'm updated!"))) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + assertNoFailures(response); + + // match-all search + assertHitCount(client().prepareSearch(".kibana").setQuery(QueryBuilders.matchAllQuery()), 2); + }); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java new file mode 100644 index 0000000000000..b97c39ce70792 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.indices; + +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Phaser; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.startsWith; + +/** + * Tests to verify that system indices are bypassing user-space thread pools + * + *

We can block thread pools by setting them to one thread and no queue, then submitting + * threads that wait on a countdown latch. This lets us verify that operations on system indices + * are being directed to other thread pools.

+ * + *

When implementing this class, don't forget to override {@link ESIntegTestCase#nodePlugins()} if + * the relevant system index is defined in a plugin.

+ */ +public abstract class SystemIndexThreadPoolTests extends ESIntegTestCase { + + private static final String USER_INDEX = "user_index"; + + // For system indices that use ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS, we'll want to + // block normal system index thread pools as well. + protected Set threadPoolsToBlock() { + return Set.of(ThreadPool.Names.GET, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH); + } + + protected void runWithBlockedThreadPools(Runnable runnable) { + Phaser phaser = new Phaser(); + Runnable waitAction = () -> { + phaser.arriveAndAwaitAdvance(); + phaser.arriveAndAwaitAdvance(); + }; + phaser.register(); // register this test's thread + + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : threadPoolsToBlock()) { + ThreadPool.Info info = threadPool.info(threadPoolName); + phaser.bulkRegister(info.getMax()); + for (int i = 0; i < info.getMax(); i++) { + threadPool.executor(threadPoolName).submit(waitAction); + } + } + } + phaser.arriveAndAwaitAdvance(); + try { + runnable.run(); + } finally { + phaser.arriveAndAwaitAdvance(); + } + } + + public void testUserThreadPoolsAreBlocked() { + assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); + + runWithBlockedThreadPools(this::assertThreadPoolsBlocked); + + assertAcked(client().admin().indices().prepareDelete(USER_INDEX)); + } + + private void assertThreadPoolsBlocked() { + fillThreadPoolQueues(); // rejections are easier to check than timeouts + + var e1 = expectThrows( + EsRejectedExecutionException.class, + () -> client().prepareIndex(USER_INDEX).setSource(Map.of("foo", "bar")).get() + ); + assertThat(e1.getMessage(), startsWith("rejected execution of TimedRunnable")); + var e2 = expectThrows(EsRejectedExecutionException.class, () -> client().prepareGet(USER_INDEX, "id").get()); + assertThat(e2.getMessage(), startsWith("rejected execution of 
ActionRunnable")); + var e3 = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(USER_INDEX).setQuery(QueryBuilders.matchAllQuery()).get() + ); + assertThat(e3.getMessage(), startsWith("all shards failed")); + } + + private void fillThreadPoolQueues() { + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : threadPoolsToBlock()) { + ThreadPool.Info info = threadPool.info(threadPoolName); + + // fill up the queue + for (int i = 0; i < info.getQueueSize().singles(); i++) { + threadPool.executor(threadPoolName).submit(() -> {}); + } + } + } + } +} From 8644d3d11a18581cb244a674f52957bf05a51048 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 1 Apr 2024 14:23:01 +0200 Subject: [PATCH 41/69] Remove unused o.e.i.m.extras.RankFeatureQueryBuilders (#106951) This is unused and the last commit referencing it was in 21. --- .../extras/RankFeatureQueryBuilders.java | 65 ------------------- 1 file changed, 65 deletions(-) delete mode 100644 modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java deleted file mode 100644 index 649685667b2fe..0000000000000 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilders.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.index.mapper.extras; - -public final class RankFeatureQueryBuilders { - private RankFeatureQueryBuilders() {} - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * S / (S + pivot)} where S is the value of the static feature. - * @param fieldName field that stores features - * @param pivot feature value that would give a score contribution equal to weight/2, must be in (0, +Infinity) - */ - public static RankFeatureQueryBuilder saturation(String fieldName, float pivot) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Saturation(pivot)); - } - - /** - * Same as {@link #saturation(String, float)} but a reasonably good default pivot value - * is computed based on index statistics and is approximately equal to the geometric mean of all - * values that exist in the index. - * @param fieldName field that stores features - */ - public static RankFeatureQueryBuilder saturation(String fieldName) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Saturation()); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * Math.log(scalingFactor + S)} where S is the value of the static feature. - * @param fieldName field that stores features - * @param scalingFactor scaling factor applied before taking the logarithm, must be in [1, +Infinity) - */ - public static RankFeatureQueryBuilder log(String fieldName, float scalingFactor) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Log(scalingFactor)); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code boost * S^a / (S^a + pivot^a)} where S is the value of the static feature. 
- * @param fieldName field that stores features - * @param pivot feature value that would give a score contribution equal to weight/2, must be in (0, +Infinity) - * @param exp exponent, higher values make the function grow slower before 'pivot' and faster after 'pivot', - * must be in (0, +Infinity) - */ - public static RankFeatureQueryBuilder sigmoid(String fieldName, float pivot, float exp) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Sigmoid(pivot, exp)); - } - - /** - * Return a new {@link RankFeatureQueryBuilder} that will score documents as - * {@code S)} where S is the indexed value of the static feature. - * @param fieldName field that stores features - */ - public static RankFeatureQueryBuilder linear(String fieldName) { - return new RankFeatureQueryBuilder(fieldName, new RankFeatureQueryBuilder.ScoreFunction.Linear()); - } - -} From 89bf4b33e853bec1d9ce5a05b8d1bd57dd8242f0 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 1 Apr 2024 08:23:32 -0400 Subject: [PATCH 42/69] Make int8_hnsw our default index for new dense-vector fields (#106836) For float32, there is no compelling reason to use all the memory required by default for HNSW. Using `int8_hnsw` provides a much saner default when it comes to cost vs relevancy. So, on all new indices that use `dense_vector` and want to index them for fast search, we will default to `int8_hnsw`. Users can still customize their parameters, or prefer `hnsw` over float32 if they so desire. 
--- docs/changelog/106836.yaml | 5 ++ .../mapping/types/dense-vector.asciidoc | 10 ++-- .../search-your-data/knn-search.asciidoc | 7 ++- .../test/old_cluster/30_vector_search.yml | 4 ++ .../search.vectors/100_knn_nested_search.yml | 4 ++ .../120_knn_query_multiple_shards.yml | 4 ++ .../140_knn_query_with_other_queries.yml | 4 ++ .../160_knn_query_missing_params.yml | 8 ++++ .../test/search.vectors/40_knn_search.yml | 12 +++++ .../search.vectors/40_knn_search_cosine.yml | 20 ++++++++ .../60_knn_search_filter_alias.yml | 4 ++ .../80_dense_vector_indexed_by_default.yml | 38 ++++++++++----- .../elasticsearch/index/IndexVersions.java | 1 + .../vectors/DenseVectorFieldMapper.java | 48 ++++++++++++------- .../vectors/DenseVectorFieldMapperTests.java | 10 +++- .../xpack/rank/rrf/RRFRankSingleShardIT.java | 9 ++++ .../rest-api-spec/test/rrf/100_rank_rrf.yml | 4 ++ .../test/rrf/200_rank_rrf_script.yml | 8 ++++ .../test/rrf/300_rrf_retriever.yml | 4 ++ .../test/rrf/400_rrf_retriever_script.yml | 8 ++++ 20 files changed, 179 insertions(+), 33 deletions(-) create mode 100644 docs/changelog/106836.yaml diff --git a/docs/changelog/106836.yaml b/docs/changelog/106836.yaml new file mode 100644 index 0000000000000..f561f44d9bb2d --- /dev/null +++ b/docs/changelog/106836.yaml @@ -0,0 +1,5 @@ +pr: 106836 +summary: Make int8_hnsw our default index for new dense-vector fields +area: Mapping +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index cec41eab41238..14fe9d4963970 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -65,7 +65,7 @@ data structure to support fast kNN retrieval through the <> `float` vectors. Currently the only quantization method supported is `int8` and provided vectors `element_type` must be `float`. To use -a quantized index, you can set your index type to `int8_hnsw`. 
+a quantized index, you can set your index type to `int8_hnsw`. When indexing `float` vectors, the current default +index type is `int8_hnsw`. When using the `int8_hnsw` index, each of the `float` vectors' dimensions are quantized to 1-byte integers. This can reduce the memory footprint by as much as 75% at the cost of some accuracy. However, the disk usage can increase by @@ -240,9 +241,10 @@ expense of slower indexing speed. The type of kNN algorithm to use. Can be either any of: + -- -* `hnsw` - The default storage type. This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] for scalable +* `hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] for scalable approximate kNN search. This supports all `element_type` values. -* `int8_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically scalar +* `int8_hnsw` - The default index type for float vectors. +This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatically scalar quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint by 4x at the cost of some accuracy. See <>. * `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 030c10a91d005..ffac84c11a779 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -272,6 +272,8 @@ If you want to provide `float` vectors, but want the memory savings of `byte` ve internally they are indexed as `byte` vectors. Additionally, the original `float` vectors are still retained in the index. +NOTE: The default index type for `dense_vector` is `int8_hnsw`. 
+ To use quantization, you can use the index type `int8_hnsw` object in the `dense_vector` mapping. [source,console] @@ -652,7 +654,10 @@ PUT passage_vectors "properties": { "vector": { "type": "dense_vector", - "dims": 2 + "dims": 2, + "index_options": { + "type": "hnsw" + } }, "text": { "type": "text", diff --git a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml index 4aca71fe48f4a..96b950e5ae927 100644 --- a/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml +++ b/qa/rolling-upgrade-legacy/src/test/resources/rest-api-spec/test/old_cluster/30_vector_search.yml @@ -18,6 +18,10 @@ dims: 3 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 100 - do: bulk: index: test-float-index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index 6c6c75990b0f5..32558dbe5a8c0 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -23,6 +23,10 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml index b1c0fd948481b..eb70e5b7bcf64 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/120_knn_query_multiple_shards.yml @@ -19,6 +19,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 my_name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml index 8f52a72cce01e..0ea24686ff839 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/140_knn_query_with_other_queries.yml @@ -19,6 +19,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 my_name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml index 9ff6319a01af4..4a884b644c6a7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/160_knn_query_missing_params.yml @@ -15,6 +15,10 @@ setup: dims: 3 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 category: type: keyword nested: @@ -27,6 +31,10 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml index 57f8603f1e06e..c8cbf499cf8b2 100644 
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search.yml @@ -15,11 +15,19 @@ setup: dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 another_vector: type: dense_vector dims: 5 index: true similarity: l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: @@ -371,6 +379,10 @@ setup: dims: 5 index: true similarity: max_inner_product + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: index: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml index 8faad25f0037d..b1933ebde297d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/40_knn_search_cosine.yml @@ -13,26 +13,46 @@ setup: dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 normalized_vector: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 end_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 first_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 middle_normalized: type: dense_vector dims: 5 index: true similarity: cosine + index_options: + type: hnsw + m: 16 + ef_construction: 200 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml index 92065857bd571..0672e27b43c67 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_knn_search_filter_alias.yml @@ -17,6 +17,10 @@ setup: dims: 4 index : true similarity : l2_norm + index_options: + type: hnsw + m: 16 + ef_construction: 200 name: type: keyword store: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml index 7f67d53f31384..784edfdac3469 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/80_dense_vector_indexed_by_default.yml @@ -21,16 +21,10 @@ setup: indices.get_mapping: index: test - - match: - test: - mappings: - properties: - vector: - type: dense_vector - dims: 5 - index: true - similarity: cosine - + - match: { test.mappings.properties.vector.type: dense_vector } + - match: { test.mappings.properties.vector.dims: 5 } + - match: { test.mappings.properties.vector.index: true } + - match: { test.mappings.properties.vector.similarity: cosine } --- "Indexed by default with specified similarity and index options": - do: @@ -127,7 +121,29 @@ setup: type: hnsw m: 32 ef_construction: 200 +--- +"Default index options for dense_vector": + - skip: + version: ' - 8.13.99' + reason: 'dense_vector indexed as int8_hnsw by default was added in 8.14' + - do: + indices.create: + index: test_default_index_options + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + - match: { acknowledged: true } + - do: + indices.get_mapping: + index: 
test_default_index_options - + - match: { test_default_index_options.mappings.properties.vector.type: dense_vector } + - match: { test_default_index_options.mappings.properties.vector.dims: 5 } + - match: { test_default_index_options.mappings.properties.vector.index: true } + - match: { test_default_index_options.mappings.properties.vector.similarity: cosine } + - match: { test_default_index_options.mappings.properties.vector.index_options.type: int8_hnsw } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index bca7b963becaa..6edd43683519e 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -103,6 +103,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ID_HASHING = def(8_502_00_1, Version.LUCENE_9_9_2); public static final IndexVersion UPGRADE_TO_LUCENE_9_10 = def(8_503_00_0, Version.LUCENE_9_10_0); public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); + public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 73e29a98c1531..db958dc8a8acb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -93,6 +93,7 @@ import static org.elasticsearch.common.Strings.format; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.index.IndexVersions.DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; /** * A {@link FieldMapper} for indexing a dense vector of floats. @@ -108,6 +109,7 @@ static boolean isNotUnitVector(float magnitude) { public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; public static final IndexVersion NORMALIZE_COSINE = IndexVersions.NORMALIZED_VECTOR_COSINE; + public static final IndexVersion DEFAULT_TO_INT8 = DEFAULT_DENSE_VECTOR_TO_INT8_HNSW; public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; @@ -152,15 +154,7 @@ public static class Builder extends FieldMapper.Builder { }, m -> toType(m).fieldType().dims, XContentBuilder::field, Object::toString).setSerializerCheck((id, ic, v) -> v != null) .setMergeValidator((previous, current, c) -> previous == null || Objects.equals(previous, current)); private final Parameter similarity; - private final Parameter indexOptions = new Parameter<>( - "index_options", - false, - () -> null, - (n, c, o) -> o == null ? 
null : parseIndexOptions(n, o), - m -> toType(m).indexOptions, - XContentBuilder::field, - Objects::toString - ).setSerializerCheck((id, ic, v) -> v != null); + private final Parameter indexOptions; private final Parameter indexed; private final Parameter> meta = Parameter.metaParam(); @@ -170,6 +164,7 @@ public Builder(String name, IndexVersion indexVersionCreated) { super(name); this.indexVersionCreated = indexVersionCreated; final boolean indexedByDefault = indexVersionCreated.onOrAfter(INDEXED_BY_DEFAULT_INDEX_VERSION); + final boolean defaultInt8Hnsw = indexVersionCreated.onOrAfter(DEFAULT_DENSE_VECTOR_TO_INT8_HNSW); this.indexed = Parameter.indexParam(m -> toType(m).fieldType().indexed, indexedByDefault); if (indexedByDefault) { // Only serialize on newer index versions to prevent breaking existing indices when upgrading @@ -182,6 +177,34 @@ public Builder(String name, IndexVersion indexVersionCreated) { (Supplier) () -> indexedByDefault && indexed.getValue() ? VectorSimilarity.COSINE : null, VectorSimilarity.class ).acceptsNull().setSerializerCheck((id, ic, v) -> v != null); + this.indexOptions = new Parameter<>( + "index_options", + false, + () -> defaultInt8Hnsw && elementType.getValue() != ElementType.BYTE && this.indexed.getValue() + ? new Int8HnswIndexOptions( + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + null + ) + : null, + (n, c, o) -> o == null ? 
null : parseIndexOptions(n, o), + m -> toType(m).indexOptions, + (b, n, v) -> { + if (v != null) { + b.field(n, v); + } + }, + Objects::toString + ).setSerializerCheck((id, ic, v) -> v != null).addValidator(v -> { + if (v != null && v.supportsElementType(elementType.getValue()) == false) { + throw new IllegalArgumentException( + "[element_type] cannot be [" + elementType.getValue().toString() + "] when using index type [" + v.type + "]" + ); + } + }).acceptsNull(); + if (defaultInt8Hnsw) { + this.indexOptions.alwaysSerialize(); + } this.indexed.addValidator(v -> { if (v) { if (similarity.getValue() == null) { @@ -200,13 +223,6 @@ public Builder(String name, IndexVersion indexVersionCreated) { } } }); - this.indexOptions.addValidator(v -> { - if (v != null && v.supportsElementType(elementType.getValue()) == false) { - throw new IllegalArgumentException( - "[element_type] cannot be [" + elementType.getValue().toString() + "] when using index type [" + v.type + "]" - ); - } - }); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index c417ec995a20a..2d1b1cc9545db 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -247,7 +247,15 @@ public void testMergeDims() throws IOException { mapping = mapping(b -> { b.startObject("field"); - b.field("type", "dense_vector").field("dims", 4).field("similarity", "cosine").field("index", true); + b.field("type", "dense_vector") + .field("dims", 4) + .field("similarity", "cosine") + .field("index", true) + .startObject("index_options") + .field("type", "int8_hnsw") + .field("m", 16) + .field("ef_construction", 100) + .endObject(); b.endObject(); }); merge(mapperService, mapping); diff --git 
a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 3a82f697acc9d..7269d9c3e5e7f 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -53,6 +53,9 @@ public void setupIndices() throws Exception { .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("text") .field("type", "text") @@ -80,12 +83,18 @@ public void setupIndices() throws Exception { .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("vector_desc") .field("type", "dense_vector") .field("dims", 1) .field("index", true) .field("similarity", "l2_norm") + .startObject("index_options") + .field("type", "hnsw") + .endObject() .endObject() .startObject("int") .field("type", "integer") diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml index 56cb8dd94de0d..e55a1897eb701 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/100_rank_rrf.yml @@ -21,6 +21,10 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml index 11d048b9d11be..de5b29b21da72 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/200_rank_rrf_script.yml @@ -18,11 +18,19 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 vector_desc: type: dense_vector dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 int: type: integer text: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml index 2c7c5e5a50697..1387c37349cd4 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/300_rrf_retriever.yml @@ -21,6 +21,10 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 - do: index: diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml index 7ac41de12c5e7..2c2b59f306ee3 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/400_rrf_retriever_script.yml @@ -20,11 +20,19 @@ setup: dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 vector_desc: type: dense_vector dims: 1 index: true similarity: l2_norm + index_options: + type: hnsw + ef_construction: 100 + m: 16 int: type: integer text: From 
f4613d0248368f4a8e28ad52b1b95156c4499a6d Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 1 Apr 2024 17:33:03 +0300 Subject: [PATCH 43/69] ESQL: perform a reduction on the data node (#106516) * Introduce node-level reduction (instead of the coordinator level one) behind a pragma --- docs/changelog/106516.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../xpack/esql/EsqlSecurityIT.java | 3 + .../src/main/resources/ip.csv-spec | 6 +- .../action/AbstractEsqlIntegTestCase.java | 3 + .../xpack/esql/action/EsqlActionTaskIT.java | 100 +++++++++++++++--- .../xpack/esql/io/stream/PlanNamedTypes.java | 6 +- .../xpack/esql/io/stream/PlanStreamInput.java | 4 + .../esql/io/stream/PlanStreamOutput.java | 9 ++ .../esql/optimizer/PhysicalPlanOptimizer.java | 3 +- .../esql/plan/physical/FragmentExec.java | 31 ++++-- .../esql/planner/LocalExecutionPlanner.java | 10 +- .../xpack/esql/planner/Mapper.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 38 +++++++ .../xpack/esql/plugin/ComputeService.java | 27 ++++- .../xpack/esql/plugin/QueryPragmas.java | 10 ++ .../xpack/esql/session/EsqlSession.java | 2 +- .../xpack/esql/planner/FilterTests.java | 2 +- 18 files changed, 224 insertions(+), 38 deletions(-) create mode 100644 docs/changelog/106516.yaml diff --git a/docs/changelog/106516.yaml b/docs/changelog/106516.yaml new file mode 100644 index 0000000000000..905896fb0ef03 --- /dev/null +++ b/docs/changelog/106516.yaml @@ -0,0 +1,5 @@ +pr: 106516 +summary: "ESQL: perform a reduction on the data node" +area: ES|QL +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 76f4d6c1c0fae..d7bc07b3eb2b4 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -158,6 +158,7 @@ static TransportVersion def(int id) { public static final TransportVersion 
SEARCH_NODE_LOAD_AUTOSCALING = def(8_617_00_0); public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); + public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 2dd64cf02446b..7a9b90baa0d35 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -389,6 +389,9 @@ static Settings randomPragmas() { if (randomBoolean()) { settings.put("enrich_max_workers", between(1, 5)); } + if (randomBoolean()) { + settings.put("node_level_reduction", randomBoolean()); + } return settings.build(); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 09b17ed4112c9..58c1cf3dc9174 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -62,9 +62,9 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece lessThan required_feature: esql.mv_warn -from hosts | sort host, card | where ip0 < ip1 | keep card, host, ip0, ip1; -warning:Line 1:38: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:38: java.lang.IllegalArgumentException: single-value function encountered multi-value +from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; +warning:Line 1:43: evaluation of [ip0 < ip1] failed, treating result as null. 
Only first 20 failures recorded. +warning:Line 1:43: java.lang.IllegalArgumentException: single-value function encountered multi-value card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 5ba9c622d85da..a9238d202e5b5 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -188,6 +188,9 @@ protected static QueryPragmas randomPragmas() { if (randomBoolean()) { settings.put("max_concurrent_shards_per_node", randomIntBetween(1, 10)); } + if (randomBoolean()) { + settings.put("node_level_reduction", randomBoolean()); + } } return new QueryPragmas(settings.build()); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index 92987db865ac7..672a1f6cc8c71 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -76,6 +76,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { private String READ_DESCRIPTION; private String MERGE_DESCRIPTION; private String REDUCE_DESCRIPTION; + private boolean nodeLevelReduction; @Before public void setup() { @@ -94,6 +95,7 @@ public void setup() { REDUCE_DESCRIPTION = """ \\_ExchangeSourceOperator[] \\_ExchangeSinkOperator"""; + nodeLevelReduction = randomBoolean(); } public void testTaskContents() throws 
Exception { @@ -209,22 +211,31 @@ public void testCancelEsqlTask() throws Exception { } private ActionFuture startEsql() { + return startEsql("from test | stats sum(pause_me)"); + } + + private ActionFuture startEsql(String query) { scriptPermits.drainPermits(); scriptPermits.release(between(1, 5)); - var pragmas = new QueryPragmas( - Settings.builder() - // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. - .put("data_partitioning", "shard") - // Limit the page size to something small so we do more than one page worth of work, so we get more status updates. - .put("page_size", pageSize()) - // Report the status after every action - .put("status_interval", "0ms") - .build() - ); - return EsqlQueryRequestBuilder.newSyncEsqlQueryRequestBuilder(client()) - .query("from test | stats sum(pause_me)") - .pragmas(pragmas) - .execute(); + var settingsBuilder = Settings.builder() + // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. + .put("data_partitioning", "shard") + // Limit the page size to something small so we do more than one page worth of work, so we get more status updates. 
+ .put("page_size", pageSize()) + // Report the status after every action + .put("status_interval", "0ms"); + + if (nodeLevelReduction == false) { + // explicitly set the default (false) or don't + if (randomBoolean()) { + settingsBuilder.put("node_level_reduction", nodeLevelReduction); + } + } else { + settingsBuilder.put("node_level_reduction", nodeLevelReduction); + } + + var pragmas = new QueryPragmas(settingsBuilder.build()); + return EsqlQueryRequestBuilder.newSyncEsqlQueryRequestBuilder(client()).query(query).pragmas(pragmas).execute(); } private void cancelTask(TaskId taskId) { @@ -407,6 +418,67 @@ protected void doRun() throws Exception { } } + public void testTaskContentsForTopNQuery() throws Exception { + READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, " + + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n" + + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n" + + "\\_ProjectOperator[projection = [1]]\n" + + "\\_ExchangeSinkOperator").replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + "\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + + "\\_ProjectOperator[projection = [0]]\n" + + "\\_OutputOperator[columns = [pause_me]]"; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction + ? 
"\\_TopNOperator[count=1000, elementTypes=[LONG], encoders=[DefaultSortable], " + + "sortOrders=[SortOrder[channel=0, asc=true, nullsFirst=false]]]\n" + : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + // each scripted field "emit" is called by LuceneTopNSourceOperator and by ValuesSourceReaderOperator + scriptPermits.release(2 * numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo(1L)); + } + } + } + + public void testTaskContentsForLimitQuery() throws Exception { + String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit()] + \\_ValuesSourceReaderOperator[fields = [pause_me]] + \\_ProjectOperator[projection = [1]] + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_LimitOperator[limit = limit()] + \\_ProjectOperator[projection = [0]] + \\_OutputOperator[columns = [pause_me]]""".replace("limit()", limit); + REDUCE_DESCRIPTION = ("\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_LimitOperator[limit = limit()]\n" : "") + + "\\_ExchangeSinkOperator").replace("limit()", limit); + + ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo(1L)); + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 44e134a0d7aec..536265b1be3e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -629,7 +629,8 @@ static FragmentExec readFragmentExec(PlanStreamInput in) throws IOException { in.readSource(), in.readLogicalPlanNode(), in.readOptionalNamedWriteable(QueryBuilder.class), - in.readOptionalVInt() + in.readOptionalVInt(), + in.getTransportVersion().onOrAfter(TransportVersions.ESQL_REDUCER_NODE_FRAGMENT) ? 
in.readOptionalPhysicalPlanNode() : null ); } @@ -638,6 +639,9 @@ static void writeFragmentExec(PlanStreamOutput out, FragmentExec fragmentExec) t out.writeLogicalPlanNode(fragmentExec.fragment()); out.writeOptionalNamedWriteable(fragmentExec.esFilter()); out.writeOptionalVInt(fragmentExec.estimatedRowSize()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_REDUCER_NODE_FRAGMENT)) { + out.writeOptionalPhysicalPlanNode(fragmentExec.reducer()); + } } static GrokExec readGrokExec(PlanStreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java index bdd93d733a460..046e46d216bdc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java @@ -104,6 +104,10 @@ public PhysicalPlan readPhysicalPlanNode() throws IOException { return readNamed(PhysicalPlan.class); } + public PhysicalPlan readOptionalPhysicalPlanNode() throws IOException { + return readOptionalNamed(PhysicalPlan.class); + } + public Source readSource() throws IOException { boolean hasSource = readBoolean(); return hasSource ? 
readSourceWithText(this, configuration.query()) : Source.EMPTY; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java index ac894ce7a099e..5ee292b6add9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutput.java @@ -53,6 +53,15 @@ public void writePhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException writeNamed(PhysicalPlan.class, physicalPlan); } + public void writeOptionalPhysicalPlanNode(PhysicalPlan physicalPlan) throws IOException { + if (physicalPlan == null) { + writeBoolean(false); + } else { + writeBoolean(true); + writePhysicalPlanNode(physicalPlan); + } + } + public void writeSource(Source source) throws IOException { writeBoolean(true); writeSourceNoText(this, source); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java index 42e54f002477a..ee095a24e20fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java @@ -151,7 +151,8 @@ public PhysicalPlan apply(PhysicalPlan plan) { Source.EMPTY, new Project(logicalFragment.source(), logicalFragment, output), fragmentExec.esFilter(), - fragmentExec.estimatedRowSize() + fragmentExec.estimatedRowSize(), + fragmentExec.reducer() ) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java index 43fccf4cf62da..e23a8c783e1e6 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/FragmentExec.java @@ -20,6 +20,7 @@ public class FragmentExec extends LeafExec implements EstimatesRowSize { private final LogicalPlan fragment; private final QueryBuilder esFilter; + private final PhysicalPlan reducer; // datanode-level physical plan node that performs an intermediate (not partial) reduce /** * Estimate of the number of bytes that'll be loaded per position before @@ -28,14 +29,15 @@ public class FragmentExec extends LeafExec implements EstimatesRowSize { private final int estimatedRowSize; public FragmentExec(LogicalPlan fragment) { - this(fragment.source(), fragment, null, 0); + this(fragment.source(), fragment, null, 0, null); } - public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize) { + public FragmentExec(Source source, LogicalPlan fragment, QueryBuilder esFilter, int estimatedRowSize, PhysicalPlan reducer) { super(source); this.fragment = fragment; this.esFilter = esFilter; this.estimatedRowSize = estimatedRowSize; + this.reducer = reducer; } public LogicalPlan fragment() { @@ -50,9 +52,13 @@ public Integer estimatedRowSize() { return estimatedRowSize; } + public PhysicalPlan reducer() { + return reducer; + } + @Override protected NodeInfo info() { - return NodeInfo.create(this, FragmentExec::new, fragment, esFilter, estimatedRowSize); + return NodeInfo.create(this, FragmentExec::new, fragment, esFilter, estimatedRowSize, reducer); } @Override @@ -65,12 +71,20 @@ public PhysicalPlan estimateRowSize(State state) { int estimatedRowSize = state.consumeAllFields(false); return Objects.equals(estimatedRowSize, this.estimatedRowSize) ? 
this - : new FragmentExec(source(), fragment, esFilter, estimatedRowSize); + : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); + } + + public FragmentExec withFilter(QueryBuilder filter) { + return Objects.equals(filter, this.esFilter) ? this : new FragmentExec(source(), fragment, filter, estimatedRowSize, reducer); + } + + public FragmentExec withReducer(PhysicalPlan reducer) { + return Objects.equals(reducer, this.reducer) ? this : new FragmentExec(source(), fragment, esFilter, estimatedRowSize, reducer); } @Override public int hashCode() { - return Objects.hash(fragment, esFilter, estimatedRowSize); + return Objects.hash(fragment, esFilter, estimatedRowSize, reducer); } @Override @@ -86,7 +100,8 @@ public boolean equals(Object obj) { FragmentExec other = (FragmentExec) obj; return Objects.equals(fragment, other.fragment) && Objects.equals(esFilter, other.esFilter) - && Objects.equals(estimatedRowSize, other.estimatedRowSize); + && Objects.equals(estimatedRowSize, other.estimatedRowSize) + && Objects.equals(reducer, other.reducer); } @Override @@ -97,7 +112,9 @@ public String nodeString() { sb.append(esFilter); sb.append(", estimatedRowSize="); sb.append(estimatedRowSize); - sb.append(", fragment=[<>\n"); + sb.append(", reducer=["); + sb.append(reducer == null ? 
"" : reducer.toString()); + sb.append("], fragment=[<>\n"); sb.append(fragment.toString()); sb.append("<>]]"); return sb.toString(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index aad80b6c673ba..3ea3bd54da135 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -149,7 +149,7 @@ public LocalExecutionPlanner( /** * turn the given plan into a list of drivers to execute */ - public LocalExecutionPlan plan(PhysicalPlan node) { + public LocalExecutionPlan plan(PhysicalPlan localPhysicalPlan) { var context = new LocalExecutionPlannerContext( new ArrayList<>(), new Holder<>(DriverParallelism.SINGLE), @@ -160,11 +160,11 @@ public LocalExecutionPlan plan(PhysicalPlan node) { ); // workaround for https://github.com/elastic/elasticsearch/issues/99782 - node = node.transformUp( + localPhysicalPlan = localPhysicalPlan.transformUp( AggregateExec.class, a -> a.getMode() == AggregateExec.Mode.FINAL ? 
new ProjectExec(a.source(), a, Expressions.asAttributes(a.aggregates())) : a ); - PhysicalOperation physicalOperation = plan(node, context); + PhysicalOperation physicalOperation = plan(localPhysicalPlan, context); final TimeValue statusInterval = configuration.pragmas().statusInterval(); context.addDriverFactory( @@ -181,7 +181,7 @@ private PhysicalOperation plan(PhysicalPlan node, LocalExecutionPlannerContext c if (node instanceof AggregateExec aggregate) { return planAggregation(aggregate, context); } else if (node instanceof FieldExtractExec fieldExtractExec) { - return planFieldExtractNode(context, fieldExtractExec); + return planFieldExtractNode(fieldExtractExec, context); } else if (node instanceof ExchangeExec exchangeExec) { return planExchange(exchangeExec, context); } else if (node instanceof TopNExec topNExec) { @@ -259,7 +259,7 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio return PhysicalOperation.fromSource(luceneFactory, layout.build()); } - private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext context, FieldExtractExec fieldExtractExec) { + private PhysicalOperation planFieldExtractNode(FieldExtractExec fieldExtractExec, LocalExecutionPlannerContext context) { return physicalOperationProviders.fieldExtractPhysicalOperation(fieldExtractExec, plan(fieldExtractExec.child(), context)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 516c88b5f6526..2205947dccdeb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -116,7 +116,7 @@ public PhysicalPlan map(LogicalPlan p) { throw new EsqlIllegalArgumentException("unsupported logical plan node [" + p.nodeName() + "]"); } - private static boolean isPipelineBreaker(LogicalPlan p) { + 
static boolean isPipelineBreaker(LogicalPlan p) { return p instanceof Aggregate || p instanceof TopN || p instanceof Limit || p instanceof OrderBy; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index f8fd284bbd558..1e64a6f136310 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer; +import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -28,7 +29,10 @@ import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -37,8 +41,13 @@ import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.options.EsSourceOptions; +import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import 
org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; +import org.elasticsearch.xpack.ql.plan.logical.Limit; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.ql.plan.logical.OrderBy; +import org.elasticsearch.xpack.ql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -73,6 +82,35 @@ public static Tuple breakPlanBetweenCoordinatorAndDa return new Tuple<>(coordinatorPlan, dataNodePlan.get()); } + public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan unused) { + var pipelineBreakers = plan.collectFirstChildren(Mapper::isPipelineBreaker); + + if (pipelineBreakers.isEmpty() == false) { + UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); + if (pipelineBreaker instanceof TopN topN) { + return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + } else if (pipelineBreaker instanceof Limit limit) { + return new LimitExec(limit.source(), unused, limit.limit()); + } else if (pipelineBreaker instanceof OrderBy order) { + return new OrderExec(order.source(), unused, order.order()); + } else if (pipelineBreaker instanceof Aggregate aggregate) { + // TODO handle this as a special PARTIAL step (intermediate) + /*return new AggregateExec( + aggregate.source(), + unused, + aggregate.groupings(), + aggregate.aggregates(), + AggregateExec.Mode.PARTIAL, + 0 + );*/ + return null; + } else { + throw new EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); + } + } + return null; + } + /** * Returns a set of concrete indices after resolving the original indices specified in the FROM command. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 8fae3c09f32d5..959783d2f5235 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -64,6 +64,7 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; @@ -289,12 +290,19 @@ private void startComputeOnDataNodes( ActionListener parentListener, Supplier> dataNodeListenerSupplier ) { + var planWithReducer = configuration.pragmas().nodeLevelReduction() == false + ? dataNodePlan + : dataNodePlan.transformUp(FragmentExec.class, f -> { + PhysicalPlan reductionNode = PlannerUtils.dataNodeReductionPlan(f.fragment(), dataNodePlan); + return reductionNode == null ? f : f.withReducer(reductionNode); + }); + // The lambda is to say if a TEXT field has an identical exact subfield // We cannot use SearchContext because we don't have it yet. // Since it's used only for @timestamp, it is relatively safe to assume it's not needed // but it would be better to have a proper impl. 
- QueryBuilder requestFilter = PlannerUtils.requestFilter(dataNodePlan, x -> true); - EsSourceOptions esSourceOptions = PlannerUtils.esSourceOptions(dataNodePlan); + QueryBuilder requestFilter = PlannerUtils.requestFilter(planWithReducer, x -> true); + EsSourceOptions esSourceOptions = PlannerUtils.esSourceOptions(planWithReducer); lookupDataNodes( parentTask, clusterAlias, @@ -327,7 +335,7 @@ private void startComputeOnDataNodes( clusterAlias, node.shardIds, node.aliasFilters, - dataNodePlan + planWithReducer ), parentTask, TransportRequestOptions.EMPTY, @@ -426,6 +434,9 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, LOGGER.debug("Received physical plan:\n{}", plan); plan = PlannerUtils.localPlan(context.searchContexts, context.configuration, plan); + // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) + // it's doing this in the planning of EsQueryExec (the source of the data) + // see also EsPhysicalOperationProviders.sourcePhysicalOperation LocalExecutionPlanner.LocalExecutionPlan localExecutionPlan = planner.plan(plan); if (LOGGER.isDebugEnabled()) { @@ -750,11 +761,19 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T final ActionListener listener = new ChannelActionListener<>(channel); final ExchangeSinkExec reducePlan; if (request.plan() instanceof ExchangeSinkExec plan) { + var fragments = plan.collectFirstChildren(FragmentExec.class::isInstance); + if (fragments.isEmpty()) { + listener.onFailure(new IllegalStateException("expected a fragment plan for a remote compute; got " + request.plan())); + return; + } + + var localExchangeSource = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); + FragmentExec fragment = (FragmentExec) fragments.get(0); reducePlan = new ExchangeSinkExec( plan.source(), plan.output(), plan.isIntermediateAgg(), - new ExchangeSourceExec(plan.source(), plan.output(), 
plan.isIntermediateAgg()) + fragment.reducer() != null ? fragment.reducer().replaceChildren(List.of(localExchangeSource)) : localExchangeSource ); } else { listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java index fd76edf46229e..f24619ff80d9a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java @@ -57,6 +57,8 @@ public final class QueryPragmas implements Writeable { public static final Setting MAX_CONCURRENT_SHARDS_PER_NODE = Setting.intSetting("max_concurrent_shards_per_node", 10, 1, 100); + public static final Setting NODE_LEVEL_REDUCTION = Setting.boolSetting("node_level_reduction", false); + public static final QueryPragmas EMPTY = new QueryPragmas(Settings.EMPTY); private final Settings settings; @@ -126,6 +128,14 @@ public int maxConcurrentShardsPerNode() { return MAX_CONCURRENT_SHARDS_PER_NODE.get(settings); } + /** + * Returns true if each data node should perform a local reduction for sort, limit, topN, stats or false if the coordinator node + * will perform the reduction. 
+ */ + public boolean nodeLevelReduction() { + return NODE_LEVEL_REDUCTION.get(settings); + } + public boolean isEmpty() { return settings.isEmpty(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index bbf16fc946999..cc0010c788a0c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -133,7 +133,7 @@ public void execute(EsqlQueryRequest request, ActionListener liste // TODO: filter integration testing filter = fragmentFilter != null ? boolQuery().filter(fragmentFilter).must(filter) : filter; LOGGER.debug("Fold filter {} to EsQueryExec", filter); - f = new FragmentExec(f.source(), f.fragment(), filter, f.estimatedRowSize()); + f = f.withFilter(filter); } return f; }))) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index eef109cb2830e..aedb379338171 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -293,7 +293,7 @@ private PhysicalPlan plan(String query, QueryBuilder restFilter) { // System.out.println("physical\n" + physical); physical = physical.transformUp( FragmentExec.class, - f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize()) + f -> new FragmentExec(f.source(), f.fragment(), restFilter, f.estimatedRowSize(), f.reducer()) ); physical = physicalPlanOptimizer.optimize(physical); // System.out.println("optimized\n" + physical); From f078c30be7d35e8906cadb938e07c2802dfaa828 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 08:17:26 -0700 Subject: [PATCH 
44/69] AwaitsFix #106957 --- .../java/org/elasticsearch/kibana/KibanaThreadPoolTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java index 0974fd6d36b18..5fe5bbf95ba56 100644 --- a/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java +++ b/modules/kibana/src/test/java/org/elasticsearch/kibana/KibanaThreadPoolTests.java @@ -29,6 +29,7 @@ protected Collection> nodePlugins() { return Set.of(KibanaPlugin.class); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testKibanaThreadPool() { runWithBlockedThreadPools(() -> { // index documents From af8de04156f7d767cda17fbeaa7e14358f817d52 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 08:23:27 -0700 Subject: [PATCH 45/69] Remove redundant testing of Java 22 (#106943) --- .buildkite/pipelines/periodic.template.yml | 2 -- .buildkite/pipelines/periodic.yml | 2 -- 2 files changed, 4 deletions(-) diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index 8e92fffbc6f88..05d516992a7f6 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -85,7 +85,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 GRADLE_TASK: - checkPart1 - checkPart2 @@ -108,7 +107,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 BWC_VERSION: $BWC_LIST agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 0fa88bb08f468..435e709bbf072 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -416,7 +416,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 GRADLE_TASK: - checkPart1 - checkPart2 @@ -439,7 +438,6 @@ steps: - graalvm-ce17 - openjdk17 - openjdk21 - - openjdk22 
BWC_VERSION: ["7.17.20", "8.13.1", "8.14.0"] agents: provider: gcp From 25d3f70f3d912d52eec310f3dee0d1ad3751dcb5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 1 Apr 2024 11:37:01 -0400 Subject: [PATCH 46/69] ESQL: More tests for DATE_TRUNC (#106908) This adds integration tests for `DATE_TRUNC` that round to `1 HOUR` and `1 MINUTE` - that's a thing folks will do and I didn't see it in the integration tests. We do it in unit tests but I just want to be extra paranoid. --- .../src/main/resources/date.csv-spec | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index de7a48bcf6834..96a025ce5dc9c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -159,6 +159,39 @@ x:date |hire_date:date 1995-01-01T00:00:00.000Z|1995-01-27T00:00:00.000Z ; +dateTruncHour + FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC(1 HOUR, @timestamp) +| KEEP t; + +t:date +2023-10-23T12:00:00 +2023-10-23T12:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +2023-10-23T13:00:00 +; + +dateTruncMinute + FROM sample_data +| SORT @timestamp ASC +| EVAL t = DATE_TRUNC(1 MINUTE, @timestamp) +| KEEP t; + +t:date +2023-10-23T12:15:00 +2023-10-23T12:27:00 +2023-10-23T13:33:00 +2023-10-23T13:51:00 +2023-10-23T13:52:00 +2023-10-23T13:53:00 +2023-10-23T13:55:00 +; + + convertFromDatetime from employees | sort emp_no | keep birth_date | eval bd = to_datetime(birth_date) | limit 2; From af9390074cc8d0d6f7c6dfc38eb6d584cae11515 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 1 Apr 2024 17:53:51 +0200 Subject: [PATCH 47/69] Remove Singleton annotation and downstream code from o.e.common.inject (#106936) We don't use the singleton annotation anywhere so it and a bunch of the scoping code can 
just go away. Also, we don't use the stage functionality (it's always develop) so that can go away as well. --- .../common/inject/AbstractProcessor.java | 6 -- .../elasticsearch/common/inject/Binder.java | 15 +-- .../common/inject/BindingProcessor.java | 9 +- .../common/inject/InheritingState.java | 14 --- .../elasticsearch/common/inject/Injector.java | 1 - .../common/inject/InjectorBuilder.java | 18 ++-- .../common/inject/InjectorImpl.java | 9 -- .../common/inject/InjectorShell.java | 19 +--- .../common/inject/ScopeAnnotation.java | 41 --------- .../common/inject/ScopeBindingProcessor.java | 62 ------------- .../elasticsearch/common/inject/Scopes.java | 26 +----- .../common/inject/Singleton.java | 35 ------- .../elasticsearch/common/inject/Stage.java | 44 --------- .../elasticsearch/common/inject/State.java | 18 ---- .../internal/AbstractBindingBuilder.java | 2 +- .../common/inject/internal/Annotations.java | 47 ---------- .../common/inject/internal/BindingImpl.java | 2 +- .../common/inject/internal/Errors.java | 30 ------ .../inject/internal/InstanceBindingImpl.java | 4 +- .../inject/internal/LinkedBindingImpl.java | 4 +- .../internal/LinkedProviderBindingImpl.java | 4 +- .../internal/ProviderInstanceBindingImpl.java | 4 +- .../common/inject/internal/Scoping.java | 91 +------------------ .../internal/UntargettedBindingImpl.java | 4 +- .../common/inject/multibindings/Element.java | 2 - .../inject/multibindings/MapBinder.java | 2 +- .../inject/multibindings/Multibinder.java | 10 +- .../inject/multibindings/RealElement.java | 15 +-- .../common/inject/spi/ElementVisitor.java | 5 - .../common/inject/spi/Elements.java | 19 +--- .../common/inject/spi/ScopeBinding.java | 64 ------------- 31 files changed, 41 insertions(+), 585 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java delete mode 100644 
server/src/main/java/org/elasticsearch/common/inject/Singleton.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/Stage.java delete mode 100644 server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java diff --git a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index d23a3c3c10995..3eba654d412bd 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.inject.spi.ElementVisitor; import org.elasticsearch.common.inject.spi.Message; import org.elasticsearch.common.inject.spi.ProviderLookup; -import org.elasticsearch.common.inject.spi.ScopeBinding; import java.util.Iterator; import java.util.List; @@ -73,11 +72,6 @@ public Boolean visit(Message message) { return false; } - @Override - public Boolean visit(ScopeBinding scopeBinding) { - return false; - } - @Override public Boolean visit(Binding binding) { return false; diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java index 97aa924d32cb1..07a8979eb18a6 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java @@ -20,8 +20,6 @@ import org.elasticsearch.common.inject.binder.LinkedBindingBuilder; import org.elasticsearch.common.inject.spi.Message; -import java.lang.annotation.Annotation; - /** * Collects configuration information (primarily bindings) which will be * used to create an {@link Injector}. Guice provides this object to your @@ -96,11 +94,6 @@ *

Note: a scope specified in this way overrides any scope that * was specified with an annotation on the {@code ServiceImpl} class. * - *

Besides {@link Singleton}/{@link Scopes#SINGLETON}, there are - * servlet-specific scopes available in - * {@code com.google.inject.servlet.ServletScopes}, and your Modules can - * contribute their own custom scopes for use here as well. - * *

  *     bind(new TypeLiteral<PaymentService<CreditCard>>() {})
  *         .to(CreditCardPaymentService.class);
@@ -165,8 +158,7 @@ * cases Guice will let something bogus slip by, and will then inform you of * the problems at runtime, as soon as you try to create your Injector. * - *

The other methods of Binder such as {@link #bindScope}, - * {@link #install}, and {@link #addError} are not part of the Binding EDSL; + *

The other methods of Binder such as {@link #install}, and {@link #addError} are not part of the Binding EDSL; * you can learn how to use these in the usual way, from the method * documentation. * @@ -176,11 +168,6 @@ */ public interface Binder { - /** - * Binds a scope to an annotation. - */ - void bindScope(Class annotationType, Scope scope); - /** * See the EDSL examples at {@link Binder}. */ diff --git a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java index 0865bf47090af..dfa4fcb16bc62 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java @@ -16,7 +16,6 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.Annotations; import org.elasticsearch.common.inject.internal.BindingImpl; import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.ErrorsException; @@ -73,9 +72,7 @@ public Boolean visit(Binding command) { return true; } - validateKey(command.getSource(), command.getKey()); - - final Scoping scoping = Scopes.makeInjectable(((BindingImpl) command).getScoping(), injector, errors); + final Scoping scoping = ((BindingImpl) command).getScoping(); command.acceptTargetVisitor(new BindingTargetVisitor() { @@ -173,10 +170,6 @@ public Void visit() { return true; } - private void validateKey(Object source, Key key) { - Annotations.checkForMisplacedScopeAnnotations(key.getRawType(), source, errors); - } - static UntargettedBindingImpl invalidBinding(InjectorImpl injector, Key key, Object source) { return new UntargettedBindingImpl<>(injector, key, source); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java b/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java index 95788343f6f57..bf6dbabafb034 100644 --- 
a/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InheritingState.java @@ -23,10 +23,8 @@ import org.elasticsearch.common.inject.internal.MatcherAndConverter; import org.elasticsearch.common.inject.internal.SourceProvider; -import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -41,7 +39,6 @@ class InheritingState implements State { // Must be a linked hashmap in order to preserve order of bindings in Modules. private final Map, Binding> explicitBindingsMutable = new LinkedHashMap<>(); private final Map, Binding> explicitBindings = Collections.unmodifiableMap(explicitBindingsMutable); - private final Map, Scope> scopes = new HashMap<>(); private final List converters = new ArrayList<>(); private WeakKeySet blacklistedKeys = new WeakKeySet(); private final Object lock; @@ -72,17 +69,6 @@ public void putBinding(Key key, BindingImpl binding) { explicitBindingsMutable.put(key, binding); } - @Override - public Scope getScope(Class annotationType) { - Scope scope = scopes.get(annotationType); - return scope != null ? scope : State.NONE.getScope(annotationType); - } - - @Override - public void putAnnotation(Class annotationType, Scope scope) { - scopes.put(annotationType, scope); - } - @Override public Iterable getConvertersThisLevel() { return converters; diff --git a/server/src/main/java/org/elasticsearch/common/inject/Injector.java b/server/src/main/java/org/elasticsearch/common/inject/Injector.java index a4af1300e6414..a3060af8f56d4 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Injector.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Injector.java @@ -29,7 +29,6 @@ *

  • This {@link Injector} instance itself *
  • A {@code Provider} for each binding of type {@code T} *
  • The {@link java.util.logging.Logger} for the class being injected - *
  • The {@link Stage} in which the Injector was created * *

    * Injectors are created using the facade class {@link Guice}. diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java index 737176c77de54..4c5cb95491ebb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorBuilder.java @@ -36,8 +36,7 @@ * No user code is executed in this phase.

  • *
  • Dynamic injection. In this phase, we call user code. We inject members that requested * injection. This may require user's objects be created and their providers be called. And we - * create eager singletons. In this phase, user code may have started other threads. This phase - * is not executed for injectors created using {@link Stage#TOOL the tool stage}
  • + * create eager singletons. In this phase, user code may have started other threads. * * * @author crazybob@google.com (Bob Lee) @@ -136,27 +135,26 @@ private void injectDynamically() { errors.throwCreationExceptionIfErrorsExist(); for (InjectorShell shell : shells) { - loadEagerSingletons(shell.getInjector(), Stage.DEVELOPMENT, errors); + loadEagerSingletons(shell.getInjector(), errors); } stopwatch.resetAndLog("Preloading singletons"); errors.throwCreationExceptionIfErrorsExist(); } /** - * Loads eager singletons, or all singletons if we're in Stage.PRODUCTION. Bindings discovered - * while we're binding these singletons are not be eager. + * Loads eager singletons. Bindings discovered while we're binding these singletons are not be eager. */ - public static void loadEagerSingletons(InjectorImpl injector, Stage stage, Errors errors) { + public static void loadEagerSingletons(InjectorImpl injector, Errors errors) { for (final Binding binding : injector.state.getExplicitBindingsThisLevel().values()) { - loadEagerSingletons(injector, stage, errors, (BindingImpl) binding); + loadEagerSingletons(injector, errors, (BindingImpl) binding); } for (final BindingImpl binding : injector.jitBindings.values()) { - loadEagerSingletons(injector, stage, errors, binding); + loadEagerSingletons(injector, errors, binding); } } - private static void loadEagerSingletons(InjectorImpl injector, Stage stage, final Errors errors, BindingImpl binding) { - if (binding.getScoping().isEagerSingleton(stage)) { + private static void loadEagerSingletons(InjectorImpl injector, final Errors errors, BindingImpl binding) { + if (binding.getScoping().isEagerSingleton()) { try { injector.callInContext(new ContextualCallable() { final Dependency dependency = Dependency.get(binding.getKey()); diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java index 8614fd99da088..10385c54860bc 100644 --- 
a/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorImpl.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.inject.spi.ProviderKeyBinding; import org.elasticsearch.common.inject.util.Providers; -import java.lang.annotation.Annotation; import java.lang.reflect.GenericArrayType; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; @@ -46,7 +45,6 @@ import java.util.Map; import static java.util.Collections.emptySet; -import static org.elasticsearch.common.inject.internal.Annotations.findScopeAnnotation; /** * Default {@link Injector} implementation. @@ -330,13 +328,6 @@ BindingImpl createUnitializedBinding(Key key, Scoping scoping, Object throw errors.cannotInjectInnerClass(rawType).toException(); } - if (scoping.isExplicitlyScoped() == false) { - Class scopeAnnotation = findScopeAnnotation(errors, rawType); - if (scopeAnnotation != null) { - scoping = Scopes.makeInjectable(Scoping.forAnnotation(scopeAnnotation), this, errors.withSource(rawType)); - } - } - return ConstructorBindingImpl.create(this, key, source, scoping); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java b/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java index cb3b4d63ed3e2..e6df9e2641cd8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java +++ b/server/src/main/java/org/elasticsearch/common/inject/InjectorShell.java @@ -30,11 +30,9 @@ import java.util.ArrayList; import java.util.List; -import java.util.Objects; import java.util.logging.Logger; import static java.util.Collections.emptySet; -import static org.elasticsearch.common.inject.Scopes.SINGLETON; /** * A partially-initialized injector. 
See {@link InjectorBuilder}, which uses this to build a tree @@ -69,8 +67,6 @@ static class Builder { */ private State state; - private final Stage stage = Stage.DEVELOPMENT; - void addModules(Iterable modules) { for (Module module : modules) { this.modules.add(module); @@ -95,10 +91,10 @@ List build(BindingProcessor bindingProcessor, Stopwatch stopwatch InjectorImpl injector = new InjectorImpl(state); // bind Stage and Singleton if this is a top-level injector - modules.add(0, new RootModule(stage)); + modules.add(0, new RootModule()); new TypeConverterBindingProcessor(errors).prepareBuiltInConverters(injector); - elements.addAll(Elements.getElements(stage, modules)); + elements.addAll(Elements.getElements(modules)); stopwatch.resetAndLog("Module execution"); new MessageProcessor(errors).process(injector, elements); @@ -106,7 +102,6 @@ List build(BindingProcessor bindingProcessor, Stopwatch stopwatch injector.membersInjectorStore = new MembersInjectorStore(injector); stopwatch.resetAndLog("TypeListeners creation"); - new ScopeBindingProcessor(errors).process(injector, elements); stopwatch.resetAndLog("Scopes creation"); new TypeConverterBindingProcessor(errors).process(injector, elements); @@ -219,17 +214,11 @@ public String toString() { } private static class RootModule implements Module { - final Stage stage; - - private RootModule(Stage stage) { - this.stage = Objects.requireNonNull(stage, "stage"); - } + private RootModule() {} @Override public void configure(Binder binder) { - binder = binder.withSource(SourceProvider.UNKNOWN_SOURCE); - binder.bind(Stage.class).toInstance(stage); - binder.bindScope(Singleton.class, SINGLETON); + binder.withSource(SourceProvider.UNKNOWN_SOURCE); } } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java b/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java deleted file mode 100644 index ea1dd376b29dd..0000000000000 --- 
a/server/src/main/java/org/elasticsearch/common/inject/ScopeAnnotation.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.ElementType.ANNOTATION_TYPE; -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** - * Annotates annotations which are used for scoping. Only one such annotation - * may apply to a single implementation class. You must also annotate scope - * annotations with {@code @Retention(RUNTIME)}. For example: - *
    - *   {@code @}Retention(RUNTIME)
    - *   {@code @}Target(TYPE)
    - *   {@code @}ScopeAnnotation
    - *   public {@code @}interface SessionScoped {}
    - * 
    - * - * @author crazybob@google.com (Bob Lee) - */ -@Target(ANNOTATION_TYPE) -@Retention(RUNTIME) -public @interface ScopeAnnotation { -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java deleted file mode 100644 index c14acdde94c35..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/ScopeBindingProcessor.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright (C) 2008 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -import org.elasticsearch.common.inject.internal.Annotations; -import org.elasticsearch.common.inject.internal.Errors; -import org.elasticsearch.common.inject.spi.ScopeBinding; - -import java.lang.annotation.Annotation; -import java.util.Objects; - -/** - * Handles {@link Binder#bindScope} commands. 
- * - * @author crazybob@google.com (Bob Lee) - * @author jessewilson@google.com (Jesse Wilson) - */ -class ScopeBindingProcessor extends AbstractProcessor { - - ScopeBindingProcessor(Errors errors) { - super(errors); - } - - @Override - public Boolean visit(ScopeBinding command) { - Scope scope = command.getScope(); - Class annotationType = command.getAnnotationType(); - - if (Annotations.isScopeAnnotation(annotationType) == false) { - errors.withSource(annotationType).missingScopeAnnotation(); - // Go ahead and bind anyway so we don't get collateral errors. - } - - if (Annotations.isRetainedAtRuntime(annotationType) == false) { - errors.withSource(annotationType).missingRuntimeRetention(command.getSource()); - // Go ahead and bind anyway so we don't get collateral errors. - } - - Scope existing = injector.state.getScope(Objects.requireNonNull(annotationType, "annotation type")); - if (existing != null) { - errors.duplicateScopes(existing, annotationType, scope); - } else { - injector.state.putAnnotation(annotationType, Objects.requireNonNull(scope, "scope")); - } - - return true; - } -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/Scopes.java b/server/src/main/java/org/elasticsearch/common/inject/Scopes.java index ffdf1bf3192ca..60f36fd879aa2 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Scopes.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Scopes.java @@ -16,11 +16,9 @@ package org.elasticsearch.common.inject; -import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.InternalFactory; import org.elasticsearch.common.inject.internal.Scoping; -import java.lang.annotation.Annotation; import java.util.Locale; /** @@ -33,7 +31,7 @@ public class Scopes { private Scopes() {} /** - * One instance per {@link Injector}. Also see {@code @}{@link Singleton}. + * One instance per {@link Injector}. 
*/ public static final Scope SINGLETON = new Scope() { @Override @@ -82,8 +80,7 @@ public String toString() { * binding arrives it will need to obtain the instance over again. *

    * This exists only in case a class has been annotated with a scope - * annotation such as {@link Singleton @Singleton}, and you need to override - * this to "no scope" in your binding. + * annotation and you need to override this to "no scope" in your binding. * * @since 2.0 */ @@ -115,23 +112,4 @@ static InternalFactory scope(InjectorImpl injector, InternalFac return new InternalFactoryToProviderAdapter<>(Initializables.of(scoped)); } - /** - * Replaces annotation scopes with instance scopes using the Injector's annotation-to-instance - * map. If the scope annotation has no corresponding instance, an error will be added and unscoped - * will be retuned. - */ - static Scoping makeInjectable(Scoping scoping, InjectorImpl injector, Errors errors) { - Class scopeAnnotation = scoping.getScopeAnnotation(); - if (scopeAnnotation == null) { - return scoping; - } - - Scope scope = injector.state.getScope(scopeAnnotation); - if (scope != null) { - return Scoping.forInstance(scope); - } - - errors.scopeNotFound(scopeAnnotation); - return Scoping.UNSCOPED; - } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/Singleton.java b/server/src/main/java/org/elasticsearch/common/inject/Singleton.java deleted file mode 100644 index 24a4ba92dd20a..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/Singleton.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.elasticsearch.common.inject; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; - -import static java.lang.annotation.RetentionPolicy.RUNTIME; - -/** - * Apply this to implementation classes when you want only one instance - * (per {@link Injector}) to be reused for all injections for that binding. - * - * @author crazybob@google.com (Bob Lee) - */ -@Target({ ElementType.TYPE, ElementType.METHOD }) -@Retention(RUNTIME) -@ScopeAnnotation -public @interface Singleton { -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/Stage.java b/server/src/main/java/org/elasticsearch/common/inject/Stage.java deleted file mode 100644 index 5533cae4c4123..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/Stage.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject; - -/** - * The stage we're running in. - * - * @author crazybob@google.com (Bob Lee) - */ -public enum Stage { - - /** - * We're running in a tool (an IDE plugin for example). We need binding meta data but not a - * functioning Injector. Do not inject members of instances. Do not load eager singletons. Do as - * little as possible so our tools run nice and snappy. Injectors created in this stage cannot - * be used to satisfy injections. 
- */ - TOOL, - - /** - * We want fast startup times at the expense of runtime performance and some up front error - * checking. - */ - DEVELOPMENT, - - /** - * We want to catch errors as early as possible and take performance hits up front. - */ - PRODUCTION -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/State.java b/server/src/main/java/org/elasticsearch/common/inject/State.java index 553ed746ede80..e8b9404447f4f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/State.java +++ b/server/src/main/java/org/elasticsearch/common/inject/State.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.MatcherAndConverter; -import java.lang.annotation.Annotation; import java.util.Map; import static java.util.Collections.emptySet; @@ -54,16 +53,6 @@ public void putBinding(Key key, BindingImpl binding) { throw new UnsupportedOperationException(); } - @Override - public Scope getScope(Class scopingAnnotation) { - return null; - } - - @Override - public void putAnnotation(Class annotationType, Scope scope) { - throw new UnsupportedOperationException(); - } - @Override public void addConverter(MatcherAndConverter matcherAndConverter) { throw new UnsupportedOperationException(); @@ -113,13 +102,6 @@ public Object lock() { void putBinding(Key key, BindingImpl binding); - /** - * Returns the matching scope, or null. 
- */ - Scope getScope(Class scopingAnnotation); - - void putAnnotation(Class annotationType, Scope scope); - void addConverter(MatcherAndConverter matcherAndConverter); /** diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java b/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java index 60b6a74dec997..345c92ce8d354 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/AbstractBindingBuilder.java @@ -61,7 +61,7 @@ protected BindingImpl setBinding(BindingImpl binding) { public void asEagerSingleton() { checkNotScoped(); - setBinding(getBinding().withScoping(Scoping.EAGER_SINGLETON)); + setBinding(getBinding().withEagerSingletonScoping()); } protected void checkNotTargetted() { diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java index 68fb7e68ba4bc..24f2539e2574e 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Annotations.java @@ -16,10 +16,8 @@ package org.elasticsearch.common.inject.internal; -import org.elasticsearch.common.Classes; import org.elasticsearch.common.inject.BindingAnnotation; import org.elasticsearch.common.inject.Key; -import org.elasticsearch.common.inject.ScopeAnnotation; import org.elasticsearch.common.inject.TypeLiteral; import java.lang.annotation.Annotation; @@ -42,51 +40,6 @@ public static boolean isRetainedAtRuntime(Class annotation return retention != null && retention.value() == RetentionPolicy.RUNTIME; } - /** - * Returns the scope annotation on {@code type}, or null if none is specified. 
- */ - public static Class findScopeAnnotation(Errors errors, Class implementation) { - return findScopeAnnotation(errors, implementation.getAnnotations()); - } - - /** - * Returns the scoping annotation, or null if there isn't one. - */ - public static Class findScopeAnnotation(Errors errors, Annotation[] annotations) { - Class found = null; - - for (Annotation annotation : annotations) { - if (annotation.annotationType().getAnnotation(ScopeAnnotation.class) != null) { - if (found != null) { - errors.duplicateScopeAnnotations(found, annotation.annotationType()); - } else { - found = annotation.annotationType(); - } - } - } - - return found; - } - - public static boolean isScopeAnnotation(Class annotationType) { - return annotationType.getAnnotation(ScopeAnnotation.class) != null; - } - - /** - * Adds an error if there is a misplaced annotations on {@code type}. Scoping - * annotations are not allowed on abstract classes or interfaces. - */ - public static void checkForMisplacedScopeAnnotations(Class type, Object source, Errors errors) { - if (Classes.isConcrete(type)) { - return; - } - - Class scopeAnnotation = findScopeAnnotation(errors, type); - if (scopeAnnotation != null) { - errors.withSource(type).scopeAnnotationOnAbstractType(scopeAnnotation, type, source); - } - } - /** * Gets a key for the given type, member and annotations. 
*/ diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java index d6a00a7d3dc6c..b68600ef5381f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/BindingImpl.java @@ -95,7 +95,7 @@ public V acceptVisitor(ElementVisitor visitor) { return visitor.visit(this); } - protected BindingImpl withScoping(Scoping scoping) { + protected BindingImpl withEagerSingletonScoping() { throw new AssertionError(); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java index ea4b530f48b9b..cb78cf34e7d8a 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Errors.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Key; import org.elasticsearch.common.inject.ProvisionException; -import org.elasticsearch.common.inject.Scope; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.spi.Dependency; import org.elasticsearch.common.inject.spi.InjectionPoint; @@ -196,14 +195,6 @@ public Errors bindingToProvider() { return addMessage("Binding to Provider is not allowed."); } - public Errors missingRuntimeRetention(Object source) { - return addMessage("Please annotate with @Retention(RUNTIME).%n" + " Bound at %s.", convert(source)); - } - - public Errors missingScopeAnnotation() { - return addMessage("Please annotate with @ScopeAnnotation."); - } - public Errors optionalConstructor(Constructor constructor) { return addMessage("%s is annotated @Inject(optional=true), " + "but constructors cannot be optional.", constructor); } @@ -212,19 +203,6 @@ public Errors 
cannotBindToGuiceType(String simpleName) { return addMessage("Binding to core guice framework type is not allowed: %s.", simpleName); } - public Errors scopeNotFound(Class scopeAnnotation) { - return addMessage("No scope is bound to %s.", scopeAnnotation); - } - - public Errors scopeAnnotationOnAbstractType(Class scopeAnnotation, Class type, Object source) { - return addMessage( - "%s is annotated with %s, but scope annotations are not supported " + "for abstract types.%n Bound at %s.", - type, - scopeAnnotation, - convert(source) - ); - } - public Errors misplacedBindingAnnotation(Member member, Annotation bindingAnnotation) { return addMessage( "%s is annotated with %s, but binding annotations should be applied " + "to its parameters instead.", @@ -245,10 +223,6 @@ public Errors tooManyConstructors(Class implementation) { return addMessage("%s has more than one constructor annotated with @Inject. " + CONSTRUCTOR_RULES, implementation); } - public Errors duplicateScopes(Scope existing, Class annotationType, Scope scope) { - return addMessage("Scope %s is already bound to %s. Cannot bind %s.", existing, annotationType, scope); - } - public Errors missingConstantValues() { return addMessage("Missing constant value. 
Please call to(...)."); } @@ -264,10 +238,6 @@ public Errors duplicateBindingAnnotations(Member member, Class a, Class b) { - return addMessage("More than one scope annotation was found: %s and %s.", a, b); - } - public Errors recursiveBinding() { return addMessage("Binding points to itself."); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java index f5b36cf33b800..eae400dbbe052 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/InstanceBindingImpl.java @@ -74,8 +74,8 @@ public Set getInjectionPoints() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new InstanceBindingImpl<>(getSource(), getKey(), scoping, injectionPoints, instance); + public BindingImpl withEagerSingletonScoping() { + return new InstanceBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, injectionPoints, instance); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java index 135726f80e25b..79e2a4f34c7ec 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedBindingImpl.java @@ -53,8 +53,8 @@ public Key getLinkedKey() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new LinkedBindingImpl<>(getSource(), getKey(), scoping, targetKey); + public BindingImpl withEagerSingletonScoping() { + return new LinkedBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, targetKey); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java 
b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java index 0bfd2ef273a74..7f9e36be4e49d 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/LinkedProviderBindingImpl.java @@ -54,8 +54,8 @@ public Key> getProviderKey() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new LinkedProviderBindingImpl<>(getSource(), getKey(), scoping, providerKey); + public BindingImpl withEagerSingletonScoping() { + return new LinkedProviderBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, providerKey); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java index 792c18920a6fa..ae8823e2f7246 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderInstanceBindingImpl.java @@ -72,8 +72,8 @@ public Set getInjectionPoints() { } @Override - public BindingImpl withScoping(Scoping scoping) { - return new ProviderInstanceBindingImpl<>(getSource(), getKey(), scoping, injectionPoints, providerInstance); + public BindingImpl withEagerSingletonScoping() { + return new ProviderInstanceBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON, injectionPoints, providerInstance); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java index 797996afbf499..d0544aff20e39 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/Scoping.java @@ -18,10 +18,6 @@ import 
org.elasticsearch.common.inject.Scope; import org.elasticsearch.common.inject.Scopes; -import org.elasticsearch.common.inject.Singleton; -import org.elasticsearch.common.inject.Stage; - -import java.lang.annotation.Annotation; /** * References a scope, either directly (as a scope instance), or indirectly (as a scope annotation). @@ -49,34 +45,6 @@ public String toString() { }; - public static final Scoping SINGLETON_ANNOTATION = new Scoping() { - - @Override - public Class getScopeAnnotation() { - return Singleton.class; - } - - @Override - public String toString() { - return Singleton.class.getName(); - } - - }; - - public static final Scoping SINGLETON_INSTANCE = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.SINGLETON; - } - - @Override - public String toString() { - return Scopes.SINGLETON.toString(); - } - - }; - public static final Scoping EAGER_SINGLETON = new Scoping() { @Override @@ -91,46 +59,6 @@ public String toString() { }; - public static Scoping forAnnotation(final Class scopingAnnotation) { - if (scopingAnnotation == Singleton.class) { - return SINGLETON_ANNOTATION; - } - - return new Scoping() { - - @Override - public Class getScopeAnnotation() { - return scopingAnnotation; - } - - @Override - public String toString() { - return scopingAnnotation.getName(); - } - - }; - } - - public static Scoping forInstance(final Scope scope) { - if (scope == Scopes.SINGLETON) { - return SINGLETON_INSTANCE; - } - - return new Scoping() { - - @Override - public Scope getScopeInstance() { - return scope; - } - - @Override - public String toString() { - return scope.toString(); - } - - }; - } - /** * Returns true if this scope was explicitly applied. If no scope was explicitly applied then the * scoping annotation will be used. @@ -150,16 +78,8 @@ public boolean isNoScope() { /** * Returns true if this scope is a singleton that should be loaded eagerly in {@code stage}. 
*/ - public boolean isEagerSingleton(Stage stage) { - if (this == EAGER_SINGLETON) { - return true; - } - - if (stage == Stage.PRODUCTION) { - return this == SINGLETON_ANNOTATION || this == SINGLETON_INSTANCE; - } - - return false; + public boolean isEagerSingleton() { + return this == EAGER_SINGLETON; } /** @@ -169,12 +89,5 @@ public Scope getScopeInstance() { return null; } - /** - * Returns the scope annotation, or {@code null} if that isn't known for this instance. - */ - public Class getScopeAnnotation() { - return null; - } - private Scoping() {} } diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java b/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java index c5595d570563f..d0ef8affa0bfb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/UntargettedBindingImpl.java @@ -37,8 +37,8 @@ public void acceptTargetVisitor(BindingTargetVisitor visitor) } @Override - public BindingImpl withScoping(Scoping scoping) { - return new UntargettedBindingImpl<>(getSource(), getKey(), scoping); + public BindingImpl withEagerSingletonScoping() { + return new UntargettedBindingImpl<>(getSource(), getKey(), Scoping.EAGER_SINGLETON); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java index 596418f3e182d..6fca4135b596f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Element.java @@ -32,7 +32,5 @@ @Retention(RUNTIME) @BindingAnnotation @interface Element { - String setName(); - int uniqueId(); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java 
b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java index f8646ecf5fa22..5ba9bb9cdead0 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/MapBinder.java @@ -211,7 +211,7 @@ public LinkedBindingBuilder addBinding(K key) { Multibinder.checkNotNull(key, "key"); Multibinder.checkConfiguration(isInitialized() == false, "MapBinder was already initialized"); - Key valueKey = Key.get(valueType, new RealElement(RealMultibinder.getSetName())); + Key valueKey = Key.get(valueType, new RealElement()); entrySetBinder.addBinding().toInstance(new MapEntry<>(key, binder.getProvider(valueKey))); return binder.bind(valueKey); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 26609b4e2bdb9..dc44f68f2a3d0 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -152,7 +152,7 @@ public void configure(Binder binder) { public LinkedBindingBuilder addBinding() { checkConfiguration(isInitialized() == false, "Multibinder was already initialized"); - return binder.bind(Key.get(elementType, new RealElement(""))); + return binder.bind(Key.get(elementType, new RealElement())); } /** @@ -165,9 +165,7 @@ public void initialize(Injector injector) { providers = new ArrayList<>(); for (Binding entry : injector.findBindingsByType(elementType)) { Key key = entry.getKey(); - if (key.getTypeLiteral().equals(elementType) - && key.getAnnotation() instanceof Element - && ((Element) key.getAnnotation()).setName().equals("")) { + if (key.getTypeLiteral().equals(elementType) && key.getAnnotation() instanceof Element) { providers.add(entry.getProvider()); } } @@ -192,10 +190,6 @@ public Set get() 
{ return Collections.unmodifiableSet(result); } - static String getSetName() { - return ""; - } - Key> getSetKey() { return setKey; } diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java index 050a9c2fff590..a1c403744a23d 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/RealElement.java @@ -26,16 +26,9 @@ class RealElement implements Element { private static final AtomicInteger nextUniqueId = new AtomicInteger(1); private final int uniqueId; - private final String setName; - RealElement(String setName) { + RealElement() { uniqueId = nextUniqueId.getAndIncrement(); - this.setName = setName; - } - - @Override - public String setName() { - return setName; } @Override @@ -50,16 +43,16 @@ public Class annotationType() { @Override public String toString() { - return "@" + Element.class.getName() + "(setName=" + setName + ",uniqueId=" + uniqueId + ")"; + return "@" + Element.class.getName() + "(uniqueId=" + uniqueId + ")"; } @Override public boolean equals(Object o) { - return o instanceof Element && ((Element) o).setName().equals(setName()) && ((Element) o).uniqueId() == uniqueId(); + return ((Element) o).uniqueId() == uniqueId(); } @Override public int hashCode() { - return 127 * ("setName".hashCode() ^ setName.hashCode()) + 127 * ("uniqueId".hashCode() ^ uniqueId); + return Integer.hashCode(uniqueId); } } diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java index dcfe9733e5e8b..094e869d8caa8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java @@ -33,11 +33,6 @@ public interface 
ElementVisitor { */ V visit(Binding binding); - /** - * Visit a registration of a scope annotation with the scope that implements it. - */ - V visit(ScopeBinding binding); - /** * Visit a lookup of the provider for a type. */ diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index 22f86d6991e84..0115abcfa3f03 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -24,8 +24,6 @@ import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.PrivateBinder; import org.elasticsearch.common.inject.Provider; -import org.elasticsearch.common.inject.Scope; -import org.elasticsearch.common.inject.Stage; import org.elasticsearch.common.inject.TypeLiteral; import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder; import org.elasticsearch.common.inject.internal.AbstractBindingBuilder; @@ -33,7 +31,6 @@ import org.elasticsearch.common.inject.internal.Errors; import org.elasticsearch.common.inject.internal.SourceProvider; -import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -54,14 +51,14 @@ public final class Elements { * Records the elements executed by {@code modules}. */ public static List getElements(Module... modules) { - return getElements(Stage.DEVELOPMENT, Arrays.asList(modules)); + return getElements(Arrays.asList(modules)); } /** * Records the elements executed by {@code modules}. 
*/ - public static List getElements(Stage stage, Iterable modules) { - RecordingBinder binder = new RecordingBinder(stage); + public static List getElements(Iterable modules) { + RecordingBinder binder = new RecordingBinder(); for (Module module : modules) { binder.install(module); } @@ -69,7 +66,6 @@ public static List getElements(Stage stage, Iterable } private static class RecordingBinder implements Binder, PrivateBinder { - private final Stage stage; private final Set modules; private final List elements; private final Object source; @@ -80,8 +76,7 @@ private static class RecordingBinder implements Binder, PrivateBinder { */ private final RecordingBinder parent; - private RecordingBinder(Stage stage) { - this.stage = stage; + private RecordingBinder() { this.modules = new HashSet<>(); this.elements = new ArrayList<>(); this.source = null; @@ -103,7 +98,6 @@ private RecordingBinder(RecordingBinder prototype, Object source, SourceProvider throw new IllegalArgumentException(); } - this.stage = prototype.stage; this.modules = prototype.modules; this.elements = prototype.elements; this.source = source; @@ -111,11 +105,6 @@ private RecordingBinder(RecordingBinder prototype, Object source, SourceProvider this.parent = prototype.parent; } - @Override - public void bindScope(Class annotationType, Scope scope) { - elements.add(new ScopeBinding(getSource(), annotationType, scope)); - } - @Override public void install(Module module) { if (modules.add(module)) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java deleted file mode 100644 index 958562a528a5b..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ScopeBinding.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (C) 2008 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject.spi; - -import org.elasticsearch.common.inject.Scope; - -import java.lang.annotation.Annotation; -import java.util.Objects; - -/** - * Registration of a scope annotation with the scope that implements it. Instances are created - * explicitly in a module using {@link org.elasticsearch.common.inject.Binder#bindScope(Class, Scope) bindScope()} - * statements: - *

    - *     Scope recordScope = new RecordScope();
    - *     bindScope(RecordScoped.class, new RecordScope());
    - * - * @author jessewilson@google.com (Jesse Wilson) - * @since 2.0 - */ -public final class ScopeBinding implements Element { - private final Object source; - private final Class annotationType; - private final Scope scope; - - ScopeBinding(Object source, Class annotationType, Scope scope) { - this.source = Objects.requireNonNull(source, "source"); - this.annotationType = Objects.requireNonNull(annotationType, "annotationType"); - this.scope = Objects.requireNonNull(scope, "scope"); - } - - @Override - public Object getSource() { - return source; - } - - public Class getAnnotationType() { - return annotationType; - } - - public Scope getScope() { - return scope; - } - - @Override - public T acceptVisitor(ElementVisitor visitor) { - return visitor.visit(this); - } - -} From 8982513ffe26d149b9467b89db8808c1b4973736 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 1 Apr 2024 10:28:59 -0700 Subject: [PATCH 48/69] Adjust array resizing in block builder (#106934) I looked into an async profiler and found that AbstractBlockBuilder#updatePosition was consuming a significant amount of CPU. This is because we're growing the firstValueIndexes array one by one. While this minimizes wasted memory, it requires more CPU. I think we should use ArrayUtil.oversize() to resize this array. In contrast, it appears that we're growing the values array too quickly by 50% each time. I think we should use ArrayUtil.oversize() with a growth rate of 1/8 here as well. 
--- docs/changelog/106934.yaml | 5 +++ .../compute/data/BooleanArrayVector.java | 9 +++-- .../compute/data/BooleanBlockBuilder.java | 33 +++++++------------ .../compute/data/DoubleArrayVector.java | 9 +++-- .../compute/data/DoubleBlockBuilder.java | 33 +++++++------------ .../compute/data/IntArrayVector.java | 9 +++-- .../compute/data/IntBlockBuilder.java | 33 +++++++------------ .../compute/data/LongArrayVector.java | 9 +++-- .../compute/data/LongBlockBuilder.java | 33 +++++++------------ .../compute/data/AbstractArrayBlock.java | 4 +-- .../compute/data/AbstractBlockBuilder.java | 17 +++++----- .../compute/data/X-ArrayVector.java.st | 9 +++-- .../compute/data/X-BlockBuilder.java.st | 33 +++++++------------ 13 files changed, 110 insertions(+), 126 deletions(-) create mode 100644 docs/changelog/106934.yaml diff --git a/docs/changelog/106934.yaml b/docs/changelog/106934.yaml new file mode 100644 index 0000000000000..fbfce3118e8a6 --- /dev/null +++ b/docs/changelog/106934.yaml @@ -0,0 +1,5 @@ +pr: 106934 +summary: Adjust array resizing in block builder +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 63f02b14d9481..3cebcd75cbe7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of boolean values. 
@@ -112,7 +113,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? ", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 988106779a9d5..09c436e805d57 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -212,28 +212,19 @@ public BooleanBlock build() { BooleanBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantBooleanBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = 
blockFactory.newBooleanArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newBooleanArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index a7868beaf5db8..451b6cc7b655b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of double values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 9a157cdcef50e..427127784869a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -210,28 +210,19 @@ public DoubleBlock build() { DoubleBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newDoubleArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newDoubleArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 644af9ae512a8..5273ab0546151 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of int values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index d49f5af05a8a7..aaf46798fd789 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -210,28 +210,19 @@ public IntBlock build() { IntBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantIntBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newIntArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newIntArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index b3cee58356d70..6eec82528c8da 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; /** * Vector implementation that stores an array of long values. @@ -111,7 +112,11 @@ public int hashCode() { @Override public String toString() { - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? 
", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index b74831599276b..5d8daf306809d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -210,28 +210,19 @@ public LongBlock build() { LongBlock theBlock; if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantLongBlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.newLongArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.newLongArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java index 81098cba393bb..f163c630e259c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractArrayBlock.java @@ -61,9 +61,9 @@ protected final BitSet shiftNullsToExpandedPositions() { private boolean assertInvariants() { if (firstValueIndexes != null) { - assert firstValueIndexes.length == getPositionCount() + 1; + assert firstValueIndexes.length >= getPositionCount() + 1 : firstValueIndexes.length + " < " + positionCount; for (int i = 0; i < getPositionCount(); i++) { - assert (firstValueIndexes[i + 1] - firstValueIndexes[i]) >= 0; + assert firstValueIndexes[i + 1] >= firstValueIndexes[i] : firstValueIndexes[i + 1] + " < " + firstValueIndexes[i]; } } if (nullsMask != null) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index 24303ff0ea0a4..abf3a243b7682 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import java.util.Arrays; +import org.apache.lucene.util.ArrayUtil; + import java.util.BitSet; import java.util.stream.IntStream; @@ -139,7 +140,7 @@ protected final void ensureCapacity() { if (valueCount < valuesLength) { return; } - int newSize = calculateNewArraySize(valuesLength); + int newSize = ArrayUtil.oversize(valueCount, elementSize()); adjustBreaker(newSize * elementSize()); growValuesArray(newSize); adjustBreaker(-valuesLength * elementSize()); @@ -159,11 +160,6 @@ public final void close() { */ protected void extraClose() {} - static int 
calculateNewArraySize(int currentSize) { - // trivially, grows array by 50% - return currentSize + (currentSize >> 1); - } - protected void adjustBreaker(long deltaBytes) { blockFactory.adjustBreaker(deltaBytes); estimatedBytes += deltaBytes; @@ -173,8 +169,11 @@ protected void adjustBreaker(long deltaBytes) { private void setFirstValue(int position, int value) { if (position >= firstValueIndexes.length) { final int currentSize = firstValueIndexes.length; - adjustBreaker((long) (position + 1 - currentSize) * Integer.BYTES); - firstValueIndexes = Arrays.copyOf(firstValueIndexes, position + 1); + // We grow the `firstValueIndexes` at the same rate as the `values` array, but independently. + final int newLength = ArrayUtil.oversize(position + 1, Integer.BYTES); + adjustBreaker((long) newLength * Integer.BYTES); + firstValueIndexes = ArrayUtil.growExact(firstValueIndexes, newLength); + adjustBreaker(-(long) currentSize * Integer.BYTES); } firstValueIndexes[position] = value; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index b5ecb2cad4a56..9615ce83215e8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -23,7 +23,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; +import java.util.stream.Collectors; +import java.util.stream.IntStream; $endif$ /** @@ -173,7 +174,11 @@ $endif$ $if(BytesRef)$ return getClass().getSimpleName() + "[positions=" + getPositionCount() + ']'; $else$ - return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + Arrays.toString(values) + ']'; + String valuesString = IntStream.range(0, 
getPositionCount()) + .limit(10) + .mapToObj(n -> String.valueOf(values[n])) + .collect(Collectors.joining(", ", "[", getPositionCount() > 10 ? ", ...]" : "]")); + return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", values=" + valuesString + ']'; $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index f4ee6c145f3ed..5b432f1c62968 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -327,28 +327,19 @@ $endif$ $else$ if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstant$Type$BlockWith(values[0], 1, estimatedBytes); + } else if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); + } else if (isDense() && singleValued()) { + theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); } else { - if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { - theBlock = buildBigArraysBlock(); - } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - adjustBreaker(valueCount * elementSize()); - values = Arrays.copyOf(values, valueCount); - adjustBreaker(-values.length * elementSize()); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); - } else { - theBlock = blockFactory.new$Type$ArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); - } - } + theBlock = blockFactory.new$Type$ArrayBlock( + values, // stylecheck + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); } $endif$ built(); From 
3810a9164eecac3478675251ef601156a3bc6ed1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 1 Apr 2024 12:49:29 -0700 Subject: [PATCH 49/69] Block readiness on file settings being applied (#106437) The readiness service is supposed to wait on the cluster being formed as well as file settings being applied. Yet file settings application is only checked on the master node. Since master election itself does not guarantee file settings have been applied, non-master nodes may mark themselves as ready even though they should not yet accept requests. This commit reworks how the readiness service looks ofr file settings being applied. Previously it only worked on the master node where it got a callback directly from the file settings service. With this change we now only look at cluster state. Note that this means the readiness service and file based settings are tightly coupled. In practice this was always the case as they are both meant to be used in cloud environments, but the coupling is a bit tighter now since the readiness service will never report ready unless file based settings exist. 
--- .../readiness/ReadinessClusterIT.java | 66 ++++++++++++++----- .../file/AbstractFileWatchingService.java | 3 + .../java/org/elasticsearch/node/Node.java | 4 -- .../readiness/ReadinessService.java | 33 ++++------ .../service/FileSettingsService.java | 9 +++ .../service/ReservedClusterStateService.java | 21 ++++++ .../service/ReservedStateUpdateTask.java | 8 ++- .../AbstractFileWatchingServiceTests.java | 7 ++ .../readiness/ReadinessServiceTests.java | 42 +++++++----- .../shutdown/NodeShutdownReadinessIT.java | 28 ++++++++ 10 files changed, 165 insertions(+), 56 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java index 3c06a4c084e04..3d200e0fa195a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/readiness/ReadinessClusterIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; -import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -22,12 +21,13 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.InternalTestCluster; +import org.junit.Before; +import java.io.IOException; import 
java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -83,6 +83,13 @@ public class ReadinessClusterIT extends ESIntegTestCase { } }"""; + Path configDir; + + @Before + public void setupMasterConfigDir() throws IOException { + configDir = createTempDir(); + } + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder settings = Settings.builder() @@ -91,6 +98,11 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return configDir; + } + @Override protected Collection> getMockPlugins() { final List> plugins = new ArrayList<>(super.getMockPlugins()); @@ -108,6 +120,7 @@ private void expectMasterNotFound() { public void testReadinessDuringRestarts() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); logger.info("--> start data node / non master node"); String dataNode = internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); @@ -143,6 +156,7 @@ public void testReadinessDuringRestarts() throws Exception { public void testReadinessDuringRestartsNormalOrder() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); + writeFileSettings(testJSON); logger.info("--> start master node"); String masterNode = internalCluster().startMasterOnlyNode(); internalCluster().validateClusterFormed(); @@ -222,16 +236,14 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } - private void writeJSONFile(String node, String json) throws Exception { + private void writeFileSettings(String json) throws Exception { long version = versionCounter.incrementAndGet(); - - FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); - - 
Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); + Path fileSettings = configDir.resolve("operator").resolve("settings.json"); + Files.createDirectories(fileSettings.getParent()); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); - Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + Files.move(tempFilePath, fileSettings, StandardCopyOption.ATOMIC_MOVE); logger.info("--> New file settings: [{}]", Strings.format(json, version)); } @@ -244,7 +256,7 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertFalse(dataFileSettingsService.watching()); logger.info("--> write bad file settings before we boot master node"); - writeJSONFile(dataNode, testErrorJSON); + writeFileSettings(testErrorJSON); logger.info("--> start master node"); final String masterNode = internalCluster().startMasterOnlyNode( @@ -269,6 +281,31 @@ public void testNotReadyOnBadFileSettings() throws Exception { assertNull(s.boundAddress()); } + public void testReadyWhenMissingFileSettings() throws Exception { + internalCluster().setBootstrapMasterNodeIndex(0); + internalCluster().startNode(Settings.builder().put(dataOnlyNode()).put("discovery.initial_state_timeout", "1s")); + + final String masterNode = internalCluster().startMasterOnlyNode( + Settings.builder().put(INITIAL_STATE_TIMEOUT_SETTING.getKey(), "0s").build() + ); + assertMasterNode(internalCluster().nonMasterClient(), masterNode); + var savedClusterState = setupClusterStateListener(masterNode); + + // we need this after we setup the listener above, in case the node started and processed + // settings before we set our listener to cluster state changes. 
+ causeClusterStateUpdate(); + + FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); + + assertTrue(masterFileSettingsService.watching()); + + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + ReadinessService s = internalCluster().getInstance(ReadinessService.class, masterNode); + assertNotNull(s.boundAddress()); + } + private Tuple setupClusterStateListener(String node) { ClusterService clusterService = internalCluster().clusterService(node); CountDownLatch savedClusterState = new CountDownLatch(1); @@ -278,12 +315,9 @@ private Tuple setupClusterStateListener(String node) public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); if (reservedState != null) { - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedClusterSettingsAction.NAME); - if (handlerMetadata != null && handlerMetadata.keys().contains("indices.recovery.max_bytes_per_sec")) { - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - } + clusterService.removeListener(this); + metadataVersion.set(event.state().metadata().version()); + savedClusterState.countDown(); } } }); @@ -308,7 +342,7 @@ public void testReadyAfterCorrectFileSettings() throws Exception { var savedClusterState = setupClusterStateListener(dataNode); logger.info("--> write correct file settings before we boot master node"); - writeJSONFile(dataNode, testJSON); + writeFileSettings(testJSON); logger.info("--> start master node"); final String masterNode = internalCluster().startMasterOnlyNode(); diff --git a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java index 
4279733ec403a..49cb06a55a4e9 100644 --- a/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/AbstractFileWatchingService.java @@ -74,6 +74,8 @@ public AbstractFileWatchingService(Path watchedFile) { */ protected abstract void processFileChanges() throws InterruptedException, ExecutionException, IOException; + protected abstract void processInitialFileMissing() throws InterruptedException, ExecutionException, IOException; + public final void addFileChangedListener(FileChangedListener listener) { eventListeners.add(listener); } @@ -173,6 +175,7 @@ protected final void watcherThread() { logger.debug("found initial operator settings file [{}], applying...", path); processSettingsAndNotifyListeners(); } else { + processInitialFileMissing(); // Notify everyone we don't have any initial file settings for (var listener : eventListeners) { listener.watchedFileChanged(); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 8ff2ac5e5fca0..801a2038fc06b 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -358,10 +358,6 @@ public Node start() throws NodeValidationException { final FileSettingsService fileSettingsService = injector.getInstance(FileSettingsService.class); fileSettingsService.start(); - // if we are using the readiness service, listen for the file settings being applied - if (ReadinessService.enabled(environment)) { - fileSettingsService.addFileChangedListener(injector.getInstance(ReadinessService.class)); - } clusterService.addStateApplier(transportService.getTaskManager()); // start after transport service so the local disco is known diff --git a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java index 7f7a55762bf08..b8f874a8bfcb5 
100644 --- a/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java +++ b/server/src/main/java/org/elasticsearch/readiness/ReadinessService.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -21,7 +22,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.reservedstate.service.FileChangedListener; +import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.transport.BindTransportException; @@ -38,7 +39,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; -public class ReadinessService extends AbstractLifecycleComponent implements ClusterStateListener, FileChangedListener { +public class ReadinessService extends AbstractLifecycleComponent implements ClusterStateListener { private static final Logger logger = LogManager.getLogger(ReadinessService.class); private final Environment environment; @@ -50,9 +51,6 @@ public class ReadinessService extends AbstractLifecycleComponent implements Clus volatile CountDownLatch listenerThreadLatch = new CountDownLatch(0); final AtomicReference boundSocket = new AtomicReference<>(); private final Collection boundAddressListeners = new CopyOnWriteArrayList<>(); - private volatile boolean fileSettingsApplied = false; - private volatile boolean masterElected = false; - private volatile boolean shuttingDown = false; public static final Setting PORT = 
Setting.intSetting("readiness.port", -1, Setting.Property.NodeScope); @@ -237,9 +235,7 @@ protected void doClose() {} public void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); Set shutdownNodeIds = PluginShutdownService.shutdownNodes(clusterState); - - this.masterElected = clusterState.nodes().getMasterNodeId() != null; - this.shuttingDown = shutdownNodeIds.contains(clusterState.nodes().getLocalNodeId()); + boolean shuttingDown = shutdownNodeIds.contains(clusterState.nodes().getLocalNodeId()); if (shuttingDown) { // only disable the probe and log if the probe is running @@ -248,14 +244,19 @@ public void clusterChanged(ClusterChangedEvent event) { logger.info("marking node as not ready because it's shutting down"); } } else { - if (clusterState.nodes().getLocalNodeId().equals(clusterState.nodes().getMasterNodeId())) { - setReady(fileSettingsApplied); - } else { - setReady(masterElected); - } + boolean masterElected = clusterState.nodes().getMasterNodeId() != null; + boolean fileSettingsApplied = areFileSettingsApplied(clusterState); + logger.info("readiness: masterElected={}, fileSettingsApplied={}", masterElected, fileSettingsApplied); + setReady(masterElected && fileSettingsApplied); } } + // protected to allow mock service to override + protected boolean areFileSettingsApplied(ClusterState clusterState) { + ReservedStateMetadata fileSettingsMetadata = clusterState.metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); + return fileSettingsMetadata != null && fileSettingsMetadata.errorMetadata() == null; + } + private void setReady(boolean ready) { if (ready) { startListener(); @@ -277,12 +278,6 @@ public synchronized void addBoundAddressListener(BoundAddressListener listener) boundAddressListeners.add(listener); } - @Override - public void watchedFileChanged() { - fileSettingsApplied = true; - setReady(masterElected && (shuttingDown == false)); - } - /** * A listener to be notified when the readiness 
service establishes the port it's listening on. * The {@link #addressBound(BoundTransportAddress)} method is called after the readiness service socket diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index 56c975e148ab5..8719c8cbf8730 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -125,6 +126,14 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept completion.get(); } + @Override + protected void processInitialFileMissing() throws ExecutionException, InterruptedException, IOException { + PlainActionFuture completion = new PlainActionFuture<>(); + logger.info("setting file [{}] not found, initializing [{}] as empty", watchedFile(), NAMESPACE); + stateService.initEmpty(NAMESPACE, completion); + completion.get(); + } + private static void completeProcessing(Exception e, PlainActionFuture completion) { if (e != null) { completion.onFailure(e); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 76c2007dc8d8e..d2aea19417787 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.RefCountingListener; @@ -146,6 +147,26 @@ public void process(String namespace, XContentParser parser, Consumer process(namespace, stateChunk, errorListener); } + public void initEmpty(String namespace, ActionListener listener) { + var missingVersion = new ReservedStateVersion(-1L, Version.CURRENT); + var emptyState = new ReservedStateChunk(Map.of(), missingVersion); + updateTaskQueue.submitTask( + "empty initial cluster state [" + namespace + "]", + new ReservedStateUpdateTask( + namespace, + emptyState, + List.of(), + Map.of(), + List.of(), + // error state should not be possible since there is no metadata being parsed or processed + errorState -> { throw new AssertionError(); }, + listener + ), + null + ); + + } + /** * Saves and reserves a chunk of the cluster state under a given 'namespace' from {@link XContentParser} * diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 08f576f4a37e0..2ee9aa0d86a0e 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -169,14 +169,18 @@ static boolean checkMetadataVersion( return false; } + // Version -1 is special, it means "empty" + if (reservedStateVersion.version() == -1L) { + return true; + } + // Version 0 is special, snapshot restores will reset to 0. 
if (reservedStateVersion.version() <= 0L) { logger.warn( () -> format( "Not updating reserved cluster state for namespace [%s], because version [%s] is less or equal to 0", namespace, - reservedStateVersion.version(), - existingMetadata.version() + reservedStateVersion.version() ) ); return false; diff --git a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java index e12312844e571..d3924bd10d240 100644 --- a/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/common/file/AbstractFileWatchingServiceTests.java @@ -73,6 +73,13 @@ protected void processFileChanges() throws InterruptedException, ExecutionExcept countDownLatch.countDown(); } } + + @Override + protected void processInitialFileMissing() { + if (countDownLatch != null) { + countDownLatch.countDown(); + } + } } private AbstractFileWatchingService fileWatchingService; diff --git a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java index e794752aff15e..68c2230fe138f 100644 --- a/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java +++ b/server/src/test/java/org/elasticsearch/readiness/ReadinessServiceTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; +import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -29,6 +30,7 @@ import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; 
+import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.readiness.ReadinessClientProbe; @@ -51,6 +53,12 @@ public class ReadinessServiceTests extends ESTestCase implements ReadinessClient private Environment env; private FakeHttpTransport httpTransport; + private static Metadata emptyReservedStateMetadata; + static { + var fileSettingsState = new ReservedStateMetadata.Builder(FileSettingsService.NAMESPACE).version(-1L); + emptyReservedStateMetadata = new Metadata.Builder().put(fileSettingsState.build()).build(); + } + static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { final DiscoveryNode node; @@ -192,44 +200,49 @@ public void testStatusChange() throws Exception { // initially the service isn't ready assertFalse(readinessService.ready()); - ClusterState previousState = ClusterState.builder(new ClusterName("cluster")) + ClusterState emptyState = ClusterState.builder(new ClusterName("cluster")) .nodes( DiscoveryNodes.builder().add(DiscoveryNodeUtils.create("node2", new TransportAddress(TransportAddress.META_ADDRESS, 9201))) ) .build(); - ClusterState newState = ClusterState.builder(previousState) + ClusterState noFileSettingsState = ClusterState.builder(emptyState) .nodes( - DiscoveryNodes.builder(previousState.nodes()) + DiscoveryNodes.builder(emptyState.nodes()) .add(httpTransport.node) .masterNodeId(httpTransport.node.getId()) .localNodeId(httpTransport.node.getId()) ) .build(); - ClusterChangedEvent event = new ClusterChangedEvent("test", newState, previousState); + ClusterChangedEvent event = new ClusterChangedEvent("test", noFileSettingsState, emptyState); readinessService.clusterChanged(event); - readinessService.watchedFileChanged(); - // sending a cluster state with active master should bring up the service - assertTrue(readinessService.ready()); + // sending a cluster state with 
active master should not yet bring up the service, file settings still are not applied + assertFalse(readinessService.ready()); + + ClusterState completeState = ClusterState.builder(noFileSettingsState).metadata(emptyReservedStateMetadata).build(); + event = new ClusterChangedEvent("test", completeState, noFileSettingsState); + readinessService.clusterChanged(event); - previousState = newState; + // sending a cluster state with active master and file settings applied should bring up the service + assertTrue(readinessService.ready()); tcpReadinessProbeTrue(readinessService); - ClusterState noMasterState = ClusterState.builder(previousState).nodes(previousState.nodes().withMasterNodeId(null)).build(); - event = new ClusterChangedEvent("test", noMasterState, previousState); + ClusterState noMasterState = ClusterState.builder(completeState).nodes(completeState.nodes().withMasterNodeId(null)).build(); + event = new ClusterChangedEvent("test", noMasterState, completeState); readinessService.clusterChanged(event); assertFalse(readinessService.ready()); tcpReadinessProbeFalse(readinessService); - event = new ClusterChangedEvent("test", previousState, noMasterState); + event = new ClusterChangedEvent("test", completeState, noMasterState); readinessService.clusterChanged(event); assertTrue(readinessService.ready()); tcpReadinessProbeTrue(readinessService); - newState = ClusterState.builder(previousState) + // shutting down flips back to not ready + ClusterState nodeShuttingDownState = ClusterState.builder(completeState) .metadata( - Metadata.builder(previousState.metadata()) + Metadata.builder(completeState.metadata()) .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( @@ -247,8 +260,7 @@ public void testStatusChange() throws Exception { .build() ) .build(); - - event = new ClusterChangedEvent("test", newState, previousState); + event = new ClusterChangedEvent("test", nodeShuttingDownState, completeState); var mockAppender = new MockLogAppender(); try (var 
ignored = mockAppender.capturing(ReadinessService.class)) { mockAppender.addExpectation( diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java index 87eaf4d37ae00..af0713665731c 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java @@ -17,7 +17,11 @@ import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -32,6 +36,30 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class NodeShutdownReadinessIT extends ESIntegTestCase { + Path configDir; + + @Before + public void setupMasterConfigDir() throws IOException { + configDir = createTempDir(); + Path settingsFile = configDir.resolve("operator").resolve("settings.json"); + Files.createDirectories(settingsFile.getParent()); + Files.writeString(settingsFile, """ + { + "metadata": { + "version": "1", + "compatibility": "8.4.0" + }, + "state": { + "cluster_settings": {} + } + }"""); + } + + @Override + protected Path nodeConfigPath(int nodeOrdinal) { + return configDir; + } + @Override protected Collection> getMockPlugins() { final List> plugins = new ArrayList<>(super.getMockPlugins()); From a3794e7584daa4a62b6691964fded7d2d080f3a0 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Mon, 1 Apr 2024 16:12:48 -0400 Subject: [PATCH 50/69] [DOCS] Remove orphaned 
cluster issues troubleshooing doc (#106959) --- .../how-to/fix-common-cluster-issues.asciidoc | 747 ------------------ 1 file changed, 747 deletions(-) delete mode 100644 docs/reference/how-to/fix-common-cluster-issues.asciidoc diff --git a/docs/reference/how-to/fix-common-cluster-issues.asciidoc b/docs/reference/how-to/fix-common-cluster-issues.asciidoc deleted file mode 100644 index 531ae44cc3be2..0000000000000 --- a/docs/reference/how-to/fix-common-cluster-issues.asciidoc +++ /dev/null @@ -1,747 +0,0 @@ -[[fix-common-cluster-issues]] -== Fix common cluster issues - -This guide describes how to fix common errors and problems with {es} clusters. - -[discrete] -=== Error: disk usage exceeded flood-stage watermark, index has read-only-allow-delete block - -This error indicates a data node is critically low on disk space and has reached -the <>. To prevent -a full disk, when a node reaches this watermark, {es} blocks writes to any index -with a shard on the node. If the block affects related system indices, {kib} and -other {stack} features may become unavailable. - -{es} will automatically remove the write block when the affected node's disk -usage goes below the <>. To -achieve this, {es} automatically moves some of the affected node's shards to -other nodes in the same data tier. - -To verify that shards are moving off the affected node, use the <>. - -[source,console] ----- -GET _cat/shards?v=true ----- - -If shards remain on the node, use the <> to get an explanation for their allocation status. - -[source,console] ----- -GET _cluster/allocation/explain -{ - "index": "my-index", - "shard": 0, - "primary": false, - "current_node": "my-node" -} ----- -// TEST[s/^/PUT my-index\n/] -// TEST[s/"primary": false,/"primary": false/] -// TEST[s/"current_node": "my-node"//] - -To immediately restore write operations, you can temporarily increase the disk -watermarks and remove the write block. 
- -[source,console] ----- -PUT _cluster/settings -{ - "persistent": { - "cluster.routing.allocation.disk.watermark.low": "90%", - "cluster.routing.allocation.disk.watermark.low.max_headroom": "100GB", - "cluster.routing.allocation.disk.watermark.high": "95%", - "cluster.routing.allocation.disk.watermark.high.max_headroom": "20GB", - "cluster.routing.allocation.disk.watermark.flood_stage": "97%", - "cluster.routing.allocation.disk.watermark.flood_stage.max_headroom": "5GB", - "cluster.routing.allocation.disk.watermark.flood_stage.frozen": "97%", - "cluster.routing.allocation.disk.watermark.flood_stage.frozen.max_headroom": "5GB" - } -} - -PUT */_settings?expand_wildcards=all -{ - "index.blocks.read_only_allow_delete": null -} ----- -// TEST[s/^/PUT my-index\n/] - -As a long-term solution, we recommend you add nodes to the affected data tiers -or upgrade existing nodes to increase disk space. To free up additional disk -space, you can delete unneeded indices using the <>. - -[source,console] ----- -DELETE my-index ----- -// TEST[s/^/PUT my-index\n/] - -When a long-term solution is in place, reset or reconfigure the disk watermarks. - -[source,console] ----- -PUT _cluster/settings -{ - "persistent": { - "cluster.routing.allocation.disk.watermark.low": null, - "cluster.routing.allocation.disk.watermark.low.max_headroom": null, - "cluster.routing.allocation.disk.watermark.high": null, - "cluster.routing.allocation.disk.watermark.high.max_headroom": null, - "cluster.routing.allocation.disk.watermark.flood_stage": null, - "cluster.routing.allocation.disk.watermark.flood_stage.max_headroom": null, - "cluster.routing.allocation.disk.watermark.flood_stage.frozen": null, - "cluster.routing.allocation.disk.watermark.flood_stage.frozen.max_headroom": null - } -} ----- - -[discrete] -[[circuit-breaker-errors]] -=== Circuit breaker errors - -{es} uses <> to prevent nodes from running out -of JVM heap memory. 
If Elasticsearch estimates an operation would exceed a -circuit breaker, it stops the operation and returns an error. - -By default, the <> triggers at -95% JVM memory usage. To prevent errors, we recommend taking steps to reduce -memory pressure if usage consistently exceeds 85%. - -[discrete] -[[diagnose-circuit-breaker-errors]] -==== Diagnose circuit breaker errors - -**Error messages** - -If a request triggers a circuit breaker, {es} returns an error with a `429` HTTP -status code. - -[source,js] ----- -{ - 'error': { - 'type': 'circuit_breaking_exception', - 'reason': '[parent] Data too large, data for [] would be [123848638/118.1mb], which is larger than the limit of [123273216/117.5mb], real usage: [120182112/114.6mb], new bytes reserved: [3666526/3.4mb]', - 'bytes_wanted': 123848638, - 'bytes_limit': 123273216, - 'durability': 'TRANSIENT' - }, - 'status': 429 -} ----- -// NOTCONSOLE - -{es} also writes circuit breaker errors to <>. This -is helpful when automated processes, such as allocation, trigger a circuit -breaker. - -[source,txt] ----- -Caused by: org.elasticsearch.common.breaker.CircuitBreakingException: [parent] Data too large, data for [] would be [num/numGB], which is larger than the limit of [num/numGB], usages [request=0/0b, fielddata=num/numKB, in_flight_requests=num/numGB, accounting=num/numGB] ----- - -**Check JVM memory usage** - -If you've enabled Stack Monitoring, you can view JVM memory usage in {kib}. In -the main menu, click **Stack Monitoring**. On the Stack Monitoring **Overview** -page, click **Nodes**. The **JVM Heap** column lists the current memory usage -for each node. - -You can also use the <> to get the current -`heap.percent` for each node. - -[source,console] ----- -GET _cat/nodes?v=true&h=name,node*,heap* ----- - -See <> for more details. - -To get the JVM memory usage for each circuit breaker, use the -<>. 
- -[source,console] ----- -GET _nodes/stats/breaker ----- - -[discrete] -[[prevent-circuit-breaker-errors]] -==== Prevent circuit breaker errors - -**Reduce JVM memory pressure** - -High JVM memory pressure often causes circuit breaker errors. See -<>. - -**Avoid using fielddata on `text` fields** - -For high-cardinality `text` fields, fielddata can use a large amount of JVM -memory. To avoid this, {es} disables fielddata on `text` fields by default. If -you've enabled fielddata and triggered the <>, consider disabling it and using a `keyword` field instead. -See <>. - -**Clear the fieldata cache** - -If you've triggered the fielddata circuit breaker and can't disable fielddata, -use the <> to clear the fielddata cache. -This may disrupt any in-flight searches that use fielddata. - -[source,console] ----- -POST _cache/clear?fielddata=true ----- -// TEST[s/^/PUT my-index\n/] - -[discrete] -[[high-cpu-usage]] -=== High CPU usage - -{es} uses <> to manage CPU resources for -concurrent operations. High CPU usage typically means one or more thread pools -are running low. - -If a thread pool is depleted, {es} will <> -related to the thread pool. For example, if the `search` thread pool is -depleted, {es} will reject search requests until more threads are available. - -[discrete] -[[diagnose-high-cpu-usage]] -==== Diagnose high CPU usage - -**Check CPU usage** - -include::{es-repo-dir}/tab-widgets/cpu-usage-widget.asciidoc[] - -**Check hot threads** - -If a node has high CPU usage, use the <> to check for resource-intensive threads running on the node. - -[source,console] ----- -GET _nodes/my-node,my-other-node/hot_threads ----- -// TEST[s/\/my-node,my-other-node//] - -This API returns a breakdown of any hot threads in plain text. - -[discrete] -[[reduce-cpu-usage]] -==== Reduce CPU usage - -The following tips outline the most common causes of high CPU usage and their -solutions. 
- -**Scale your cluster** - -Heavy indexing and search loads can deplete smaller thread pools. To better -handle heavy workloads, add more nodes to your cluster or upgrade your existing -nodes to increase capacity. - -**Spread out bulk requests** - -While more efficient than individual requests, large <> -or <> requests still require CPU resources. If -possible, submit smaller requests and allow more time between them. - -**Cancel long-running searches** - -Long-running searches can block threads in the `search` thread pool. To check -for these searches, use the <>. - -[source,console] ----- -GET _tasks?actions=*search&detailed ----- - -The response's `description` contains the search request and its queries. -`running_time_in_nanos` shows how long the search has been running. - -[source,console-result] ----- -{ - "nodes" : { - "oTUltX4IQMOUUVeiohTt8A" : { - "name" : "my-node", - "transport_address" : "127.0.0.1:9300", - "host" : "127.0.0.1", - "ip" : "127.0.0.1:9300", - "tasks" : { - "oTUltX4IQMOUUVeiohTt8A:464" : { - "node" : "oTUltX4IQMOUUVeiohTt8A", - "id" : 464, - "type" : "transport", - "action" : "indices:data/read/search", - "description" : "indices[my-index], search_type[QUERY_THEN_FETCH], source[{\"query\":...}]", - "start_time_in_millis" : 4081771730000, - "running_time_in_nanos" : 13991383, - "cancellable" : true - } - } - } - } -} ----- -// TESTRESPONSE[skip: no way to get tasks] - -To cancel a search and free up resources, use the API's `_cancel` endpoint. - -[source,console] ----- -POST _tasks/oTUltX4IQMOUUVeiohTt8A:464/_cancel ----- - -For additional tips on how to track and avoid resource-intensive searches, see -<>. - -[discrete] -[[high-jvm-memory-pressure]] -=== High JVM memory pressure - -High JVM memory usage can degrade cluster performance and trigger -<>. To prevent this, we recommend -taking steps to reduce memory pressure if a node's JVM memory usage consistently -exceeds 85%. 
- -[discrete] -[[diagnose-high-jvm-memory-pressure]] -==== Diagnose high JVM memory pressure - -**Check JVM memory pressure** - -include::{es-repo-dir}/tab-widgets/jvm-memory-pressure-widget.asciidoc[] - -**Check garbage collection logs** - -As memory usage increases, garbage collection becomes more frequent and takes -longer. You can track the frequency and length of garbage collection events in -<>. For example, the following event states {es} -spent more than 50% (21 seconds) of the last 40 seconds performing garbage -collection. - -[source,log] ----- -[timestamp_short_interval_from_last][INFO ][o.e.m.j.JvmGcMonitorService] [node_id] [gc][number] overhead, spent [21s] collecting in the last [40s] ----- - -[discrete] -[[reduce-jvm-memory-pressure]] -==== Reduce JVM memory pressure - -**Reduce your shard count** - -Every shard uses memory. In most cases, a small set of large shards uses fewer -resources than many small shards. For tips on reducing your shard count, see -<>. - -[[avoid-expensive-searches]] -**Avoid expensive searches** - -Expensive searches can use large amounts of memory. To better track expensive -searches on your cluster, enable <>. - -Expensive searches may have a large <>, -use aggregations with a large number of buckets, or include -<>. To prevent expensive -searches, consider the following setting changes: - -* Lower the `size` limit using the -<> index setting. - -* Decrease the maximum number of allowed aggregation buckets using the -<> cluster setting. - -* Disable expensive queries using the -<> cluster -setting. - -[source,console] ----- -PUT _settings -{ - "index.max_result_window": 5000 -} - -PUT _cluster/settings -{ - "persistent": { - "search.max_buckets": 20000, - "search.allow_expensive_queries": false - } -} ----- -// TEST[s/^/PUT my-index\n/] - -**Prevent mapping explosions** - -Defining too many fields or nesting fields too deeply can lead to -<> that use large amounts of memory. 
-To prevent mapping explosions, use the <> to limit the number of field mappings. - -**Spread out bulk requests** - -While more efficient than individual requests, large <> -or <> requests can still create high JVM -memory pressure. If possible, submit smaller requests and allow more time -between them. - -**Upgrade node memory** - -Heavy indexing and search loads can cause high JVM memory pressure. To better -handle heavy workloads, upgrade your nodes to increase their memory capacity. - -[discrete] -[[red-yellow-cluster-status]] -=== Red or yellow cluster status - -A red or yellow cluster status indicates one or more shards are missing or -unallocated. These unassigned shards increase your risk of data loss and can -degrade cluster performance. - -[discrete] -[[diagnose-cluster-status]] -==== Diagnose your cluster status - -**Check your cluster status** - -Use the <>. - -[source,console] ----- -GET _cluster/health?filter_path=status,*_shards ----- - -A healthy cluster has a green `status` and zero `unassigned_shards`. A yellow -status means only replicas are unassigned. A red status means one or -more primary shards are unassigned. - -**View unassigned shards** - -To view unassigned shards, use the <>. - -[source,console] ----- -GET _cat/shards?v=true&h=index,shard,prirep,state,node,unassigned.reason&s=state ----- - -Unassigned shards have a `state` of `UNASSIGNED`. The `prirep` value is `p` for -primary shards and `r` for replicas. - -To understand why an unassigned shard is not being assigned and what action -you must take to allow {es} to assign it, use the -<>. - -[source,console] ----- -GET _cluster/allocation/explain?filter_path=index,node_allocation_decisions.node_name,node_allocation_decisions.deciders.* -{ - "index": "my-index", - "shard": 0, - "primary": false -} ----- -// TEST[s/^/PUT my-index\n/] - -[discrete] -[[fix-red-yellow-cluster-status]] -==== Fix a red or yellow cluster status - -A shard can become unassigned for several reasons. 
The following tips outline the -most common causes and their solutions. - -**Re-enable shard allocation** - -You typically disable allocation during a <> or other -cluster maintenance. If you forgot to re-enable allocation afterward, {es} will -be unable to assign shards. To re-enable allocation, reset the -`cluster.routing.allocation.enable` cluster setting. - -[source,console] ----- -PUT _cluster/settings -{ - "persistent" : { - "cluster.routing.allocation.enable" : null - } -} ----- - -**Recover lost nodes** - -Shards often become unassigned when a data node leaves the cluster. This can -occur for several reasons, ranging from connectivity issues to hardware failure. -After you resolve the issue and recover the node, it will rejoin the cluster. -{es} will then automatically allocate any unassigned shards. - -To avoid wasting resources on temporary issues, {es} <> by one minute by default. If you've recovered a node and don’t want -to wait for the delay period, you can call the <> with no arguments to start the allocation process. The process runs -asynchronously in the background. - -[source,console] ----- -POST _cluster/reroute?metric=none ----- - -**Fix allocation settings** - -Misconfigured allocation settings can result in an unassigned primary shard. -These settings include: - -* <> index settings -* <> cluster settings -* <> cluster settings - -To review your allocation settings, use the <> and <> APIs. - -[source,console] ----- -GET my-index/_settings?flat_settings=true&include_defaults=true - -GET _cluster/settings?flat_settings=true&include_defaults=true ----- -// TEST[s/^/PUT my-index\n/] - -You can change the settings using the <> and <> APIs. - -**Allocate or reduce replicas** - -To protect against hardware failure, {es} will not assign a replica to the same -node as its primary shard. If no other data nodes are available to host the -replica, it remains unassigned. To fix this, you can: - -* Add a data node to the same tier to host the replica. 
- -* Change the `index.number_of_replicas` index setting to reduce the number of -replicas for each primary shard. We recommend keeping at least one replica per -primary. - -[source,console] ----- -PUT _settings -{ - "index.number_of_replicas": 1 -} ----- -// TEST[s/^/PUT my-index\n/] - -**Free up or increase disk space** - -{es} uses a <> to ensure data -nodes have enough disk space for incoming shards. By default, {es} does not -allocate shards to nodes using more than 85% of disk space. - -To check the current disk space of your nodes, use the <>. - -[source,console] ----- -GET _cat/allocation?v=true&h=node,shards,disk.* ----- - -If your nodes are running low on disk space, you have a few options: - -* Upgrade your nodes to increase disk space. - -* Delete unneeded indices to free up space. If you use {ilm-init}, you can -update your lifecycle policy to use <> or add a delete phase. If you no longer need to search the data, you -can use a <> to store it off-cluster. - -* If you no longer write to an index, use the <> or {ilm-init}'s <> to merge its -segments into larger ones. -+ -[source,console] ----- -POST my-index/_forcemerge ----- -// TEST[s/^/PUT my-index\n/] - -* If an index is read-only, use the <> or -{ilm-init}'s <> to reduce its primary shard count. -+ -[source,console] ----- -POST my-index/_shrink/my-shrunken-index ----- -// TEST[s/^/PUT my-index\n{"settings":{"index.number_of_shards":2,"blocks.write":true}}\n/] - -* If your node has a large disk capacity, you can increase the low disk -watermark or set it to an explicit byte value. -+ -[source,console] ----- -PUT _cluster/settings -{ - "persistent": { - "cluster.routing.allocation.disk.watermark.low": "30gb" - } -} ----- -// TEST[s/"30gb"/null/] - -**Reduce JVM memory pressure** - -Shard allocation requires JVM heap memory. High JVM memory pressure can trigger -<> that stop allocation and leave shards -unassigned. See <>. 
- -**Recover data for a lost primary shard** - -If a node containing a primary shard is lost, {es} can typically replace it -using a replica on another node. If you can't recover the node and replicas -don't exist or are irrecoverable, you'll need to re-add the missing data from a -<> or the original data source. - -WARNING: Only use this option if node recovery is no longer possible. This -process allocates an empty primary shard. If the node later rejoins the cluster, -{es} will overwrite its primary shard with data from this newer empty shard, -resulting in data loss. - -Use the <> to manually allocate the -unassigned primary shard to another data node in the same tier. Set -`accept_data_loss` to `true`. - -[source,console] ----- -POST _cluster/reroute?metric=none -{ - "commands": [ - { - "allocate_empty_primary": { - "index": "my-index", - "shard": 0, - "node": "my-node", - "accept_data_loss": "true" - } - } - ] -} ----- -// TEST[s/^/PUT my-index\n/] -// TEST[catch:bad_request] - -If you backed up the missing index data to a snapshot, use the -<> to restore the individual index. -Alternatively, you can index the missing data from the original data source. - -[discrete] -[[rejected-requests]] -=== Rejected requests - -When {es} rejects a request, it stops the operation and returns an error with a -`429` response code. Rejected requests are commonly caused by: - -* A <>. A depleted `search` or `write` -thread pool returns a `TOO_MANY_REQUESTS` error message. - -* A <>. - -* High <> that exceeds the -<>. - -[discrete] -[[check-rejected-tasks]] -==== Check rejected tasks - -To check the number of rejected tasks for each thread pool, use the -<>. A high ratio of `rejected` to -`completed` tasks, particularly in the `search` and `write` thread pools, means -{es} regularly rejects requests. 
- -[source,console] ----- -GET /_cat/thread_pool?v=true&h=id,name,active,rejected,completed ----- - -[discrete] -[[prevent-rejected-requests]] -==== Prevent rejected requests - -**Fix high CPU and memory usage** - -If {es} regularly rejects requests and other tasks, your cluster likely has high -CPU usage or high JVM memory pressure. For tips, see <> and -<>. - -**Prevent circuit breaker errors** - -If you regularly trigger circuit breaker errors, see <> -for tips on diagnosing and preventing them. - -[discrete] -[[task-queue-backlog]] -=== Task queue backlog - -A backlogged task queue can prevent tasks from completing and -put the cluster into an unhealthy state. -Resource constraints, a large number of tasks being triggered at once, -and long running tasks can all contribute to a backlogged task queue. - -[discrete] -[[diagnose-task-queue-backlog]] -==== Diagnose a task queue backlog - -**Check the thread pool status** - -A <> can result in <>. - -You can use the <> to -see the number of active threads in each thread pool and -how many tasks are queued, how many have been rejected, and how many have completed. - -[source,console] ----- -GET /_cat/thread_pool?v&s=t,n&h=type,name,node_name,active,queue,rejected,completed ----- - -**Inspect the hot threads on each node** - -If a particular thread pool queue is backed up, -you can periodically poll the <> API -to determine if the thread has sufficient -resources to progress and gauge how quickly it is progressing. - -[source,console] ----- -GET /_nodes/hot_threads ----- - -**Look for long running tasks** - -Long-running tasks can also cause a backlog. -You can use the <> API to get information about the tasks that are running. -Check the `running_time_in_nanos` to identify tasks that are taking an excessive amount of time to complete. 
- -[source,console] ----- -GET /_tasks?filter_path=nodes.*.tasks ----- - -[discrete] -[[resolve-task-queue-backlog]] -==== Resolve a task queue backlog - -**Increase available resources** - -If tasks are progressing slowly and the queue is backing up, -you might need to take steps to <>. - -In some cases, increasing the thread pool size might help. -For example, the `force_merge` thread pool defaults to a single thread. -Increasing the size to 2 might help reduce a backlog of force merge requests. - -**Cancel stuck tasks** - -If you find the active task's hot thread isn't progressing and there's a backlog, -consider canceling the task. From 3eeffebf75f40d7f3fff9d974b4a47619d8c7d3b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 13:33:22 -0700 Subject: [PATCH 51/69] AwaitsFix #106964 --- .../index/mapper/vectors/DenseVectorFieldMapperTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 2d1b1cc9545db..e05cc92c8a76b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.bytes.BytesReference; @@ -64,6 +65,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106964") public class DenseVectorFieldMapperTests extends MapperTestCase { private static final IndexVersion 
INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0; From 8fdd1aa1d0769c282780c8ea5f8f634bea7715dd Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 1 Apr 2024 16:34:15 -0400 Subject: [PATCH 52/69] Refactor geoip database properties (#106960) --- .../elasticsearch/ingest/geoip/Database.java | 182 ++++++++++++++++++ .../ingest/geoip/GeoIpProcessor.java | 156 ++------------- .../geoip/GeoIpProcessorFactoryTests.java | 18 +- .../ingest/geoip/GeoIpProcessorTests.java | 2 +- 4 files changed, 211 insertions(+), 147 deletions(-) create mode 100644 modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java new file mode 100644 index 0000000000000..b1c9c99ace03e --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -0,0 +1,182 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.ingest.geoip; + +import org.elasticsearch.core.Nullable; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +/** + * A high-level representation of a kind of geoip database that is supported by the {@link GeoIpProcessor}. + *

    + * A database has a set of properties that are valid to use with it (see {@link Database#properties()}), + * as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}). + *

    + * See especially {@link Database#getDatabase(String, String)} which is used to obtain instances of this class. + */ +enum Database { + + City( + Set.of( + Property.IP, + Property.COUNTRY_ISO_CODE, + Property.COUNTRY_NAME, + Property.CONTINENT_NAME, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.TIMEZONE, + Property.LOCATION + ), + Set.of( + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_ISO_CODE, + Property.REGION_ISO_CODE, + Property.REGION_NAME, + Property.CITY_NAME, + Property.LOCATION + ) + ), + Country( + Set.of(Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE), + Set.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE) + ), + Asn( + Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK), + Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK) + ); + + private static final String CITY_DB_SUFFIX = "-City"; + private static final String COUNTRY_DB_SUFFIX = "-Country"; + private static final String ASN_DB_SUFFIX = "-ASN"; + + /** + * Parses the passed-in databaseType (presumably from the passed-in databaseFile) and return the Database instance that is + * associated with that databaseType. 
+ * + * @param databaseType the database type String from the metadata of the database file + * @param databaseFile the database file from which the database type was obtained + * @throws IllegalArgumentException if the databaseType is not associated with a Database instance + * @return the Database instance that is associated with the databaseType + */ + public static Database getDatabase(final String databaseType, final String databaseFile) { + Database database = null; + if (databaseType != null) { + if (databaseType.endsWith(Database.CITY_DB_SUFFIX)) { + database = Database.City; + } else if (databaseType.endsWith(Database.COUNTRY_DB_SUFFIX)) { + database = Database.Country; + } else if (databaseType.endsWith(Database.ASN_DB_SUFFIX)) { + database = Database.Asn; + } + } + + if (database == null) { + throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); + } + + return database; + } + + private final Set properties; + private final Set defaultProperties; + + Database(Set properties, Set defaultProperties) { + this.properties = properties; + this.defaultProperties = defaultProperties; + } + + /** + * @return a set representing all the valid properties for this database + */ + public Set properties() { + return properties; + } + + /** + * @return a set representing the default properties for this database + */ + public Set defaultProperties() { + return defaultProperties; + } + + /** + * Parse the given list of property names. 
+ * + * @param propertyNames a list of property names to parse, or null to use the default properties for this database + * @throws IllegalArgumentException if any of the property names are not valid + * @return a set of parsed and validated properties + */ + public Set parseProperties(@Nullable final List propertyNames) { + if (propertyNames != null) { + final Set parsedProperties = new HashSet<>(); + for (String propertyName : propertyNames) { + parsedProperties.add(Property.parseProperty(this.properties, propertyName)); // n.b. this throws if a property is invalid + } + return Set.copyOf(parsedProperties); + } else { + // if propertyNames is null, then use the default properties + return this.defaultProperties; + } + } + + /** + * High-level database 'properties' that represent information that can be extracted from a geoip database. + */ + enum Property { + + IP, + COUNTRY_ISO_CODE, + COUNTRY_NAME, + CONTINENT_NAME, + REGION_ISO_CODE, + REGION_NAME, + CITY_NAME, + TIMEZONE, + LOCATION, + ASN, + ORGANIZATION_NAME, + NETWORK; + + /** + * Parses a string representation of a property into an actual Property instance. Not all properties that exist are + * valid for all kinds of databases, so this method validates the parsed value against the provided set of valid properties. + *

    + * See {@link Database#parseProperties(List)} where this is used. + * + * @param validProperties the valid properties against which to validate the parsed property value + * @param value the string representation to parse + * @return a parsed, validated Property + * @throws IllegalArgumentException if the value does not parse as a Property or if the parsed Property is not + * in the passed-in validProperties set + */ + private static Property parseProperty(final Set validProperties, final String value) { + try { + Property property = valueOf(value.toUpperCase(Locale.ROOT)); + if (validProperties.contains(property) == false) { + throw new IllegalArgumentException("invalid"); + } + return property; + } catch (IllegalArgumentException e) { + // put the properties in natural order before throwing so that we have reliable error messages -- this is a little + // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial + Property[] properties = validProperties.toArray(new Property[0]); + Arrays.sort(properties); + throw new IllegalArgumentException( + "illegal property value [" + value + "]. 
valid values are " + Arrays.toString(properties) + ); + } + } + } +} diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 18ca9599f183c..ea17338c25bbf 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -25,19 +25,16 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Assertions; -import org.elasticsearch.core.Nullable; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.geoip.Database.Property; import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Supplier; @@ -54,9 +51,6 @@ public final class GeoIpProcessor extends AbstractProcessor { + "Elasticsearch no longer includes the default Maxmind geoip databases. 
This setting will be removed in Elasticsearch 9.0"; public static final String TYPE = "geoip"; - private static final String CITY_DB_SUFFIX = "-City"; - private static final String COUNTRY_DB_SUFFIX = "-Country"; - private static final String ASN_DB_SUFFIX = "-ASN"; private final String field; private final Supplier isValid; @@ -167,18 +161,18 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) throws IOException { final String databaseType = geoIpDatabase.getDatabaseType(); - final InetAddress ipAddress = InetAddresses.forString(ip); - Map geoData; - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - geoData = retrieveCityGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - geoData = retrieveCountryGeoData(geoIpDatabase, ipAddress); - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - geoData = retrieveAsnGeoData(geoIpDatabase, ipAddress); - } else { - throw new ElasticsearchParseException("Unsupported database type [" + databaseType + "]", new IllegalStateException()); + final Database database; + try { + database = Database.getDatabase(databaseType, databaseFile); + } catch (IllegalArgumentException e) { + throw new ElasticsearchParseException(e.getMessage(), e); } - return geoData; + final InetAddress ipAddress = InetAddresses.forString(ip); + return switch (database) { + case City -> retrieveCityGeoData(geoIpDatabase, ipAddress); + case Country -> retrieveCountryGeoData(geoIpDatabase, ipAddress); + case Asn -> retrieveAsnGeoData(geoIpDatabase, ipAddress); + }; } @Override @@ -382,21 +376,6 @@ public GeoIpDatabase get() throws IOException { } public static final class Factory implements Processor.Factory { - static final Set DEFAULT_CITY_PROPERTIES = Set.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.LOCATION 
- ); - static final Set DEFAULT_COUNTRY_PROPERTIES = Set.of( - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE - ); - static final Set DEFAULT_ASN_PROPERTIES = Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); private final GeoIpDatabaseProvider geoIpDatabaseProvider; @@ -443,21 +422,17 @@ public Processor create( } finally { geoIpDatabase.release(); } - if (databaseType == null - || (databaseType.endsWith(CITY_DB_SUFFIX) - || databaseType.endsWith(COUNTRY_DB_SUFFIX) - || databaseType.endsWith(ASN_DB_SUFFIX)) == false) { - throw newConfigurationException( - TYPE, - processorTag, - "database_file", - "Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]" - ); + + final Database database; + try { + database = Database.getDatabase(databaseType, databaseFile); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "database_file", e.getMessage()); } final Set properties; try { - properties = Property.parseProperties(databaseType, propertyNames); + properties = database.parseProperties(propertyNames); } catch (IllegalArgumentException e) { throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); } @@ -485,99 +460,6 @@ public static boolean downloadDatabaseOnPipelineCreation(Map con } - enum Property { - - IP, - COUNTRY_ISO_CODE, - COUNTRY_NAME, - CONTINENT_NAME, - REGION_ISO_CODE, - REGION_NAME, - CITY_NAME, - TIMEZONE, - LOCATION, - ASN, - ORGANIZATION_NAME, - NETWORK; - - static final Set ALL_CITY_PROPERTIES = Set.of( - Property.IP, - Property.COUNTRY_ISO_CODE, - Property.COUNTRY_NAME, - Property.CONTINENT_NAME, - Property.REGION_ISO_CODE, - Property.REGION_NAME, - Property.CITY_NAME, - Property.TIMEZONE, - Property.LOCATION - ); - static final Set ALL_COUNTRY_PROPERTIES = Set.of( - Property.IP, - Property.CONTINENT_NAME, - Property.COUNTRY_NAME, - Property.COUNTRY_ISO_CODE - ); - static final Set ALL_ASN_PROPERTIES 
= Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK); - - private static Property parseProperty(Set validProperties, String value) { - try { - Property property = valueOf(value.toUpperCase(Locale.ROOT)); - if (validProperties.contains(property) == false) { - throw new IllegalArgumentException("invalid"); - } - return property; - } catch (IllegalArgumentException e) { - // put the properties in natural order before throwing so that we have reliable error messages -- this is a little - // bit inefficient, but we only do this validation at processor construction time so the cost is practically immaterial - Property[] properties = validProperties.toArray(new Property[0]); - Arrays.sort(properties); - throw new IllegalArgumentException( - "illegal property value [" + value + "]. valid values are " + Arrays.toString(properties) - ); - } - } - - /** - * Parse the given list of property names and validate them against the supplied databaseType. - * - * @param databaseType the type of database to use to validate property names - * @param propertyNames a list of property names to parse, or null to use the default properties for the associated databaseType - * @throws IllegalArgumentException if any of the property names are not valid, or if the databaseType is not valid - * @return a set of parsed and validated properties - */ - public static Set parseProperties(final String databaseType, @Nullable final List propertyNames) { - final Set validProperties; - final Set defaultProperties; - - if (databaseType.endsWith(CITY_DB_SUFFIX)) { - validProperties = ALL_CITY_PROPERTIES; - defaultProperties = Factory.DEFAULT_CITY_PROPERTIES; - } else if (databaseType.endsWith(COUNTRY_DB_SUFFIX)) { - validProperties = ALL_COUNTRY_PROPERTIES; - defaultProperties = Factory.DEFAULT_COUNTRY_PROPERTIES; - } else if (databaseType.endsWith(ASN_DB_SUFFIX)) { - validProperties = ALL_ASN_PROPERTIES; - defaultProperties = Factory.DEFAULT_ASN_PROPERTIES; - } else { - assert 
false : "Unsupported database type [" + databaseType + "]"; - throw new IllegalArgumentException("Unsupported database type [" + databaseType + "]"); - } - - final Set properties; - if (propertyNames != null) { - Set modifiableProperties = new HashSet<>(); - for (String propertyName : propertyNames) { - modifiableProperties.add(parseProperty(validProperties, propertyName)); // n.b. this throws if a property is invalid - } - properties = Set.copyOf(modifiableProperties); - } else { - // if propertyNames is null, then use the default properties for the databaseType - properties = defaultProperties; - } - return properties; - } - } - static class DatabaseUnavailableProcessor extends AbstractProcessor { private final String databaseName; diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 24f2df7e30d16..99330224451ca 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; +import org.elasticsearch.ingest.geoip.Database.Property; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; @@ -100,7 +100,7 @@ public void testBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + 
assertThat(processor.getProperties(), sameInstance(Database.City.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -117,7 +117,7 @@ public void testSetIgnoreMissing() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.City.defaultProperties())); assertTrue(processor.isIgnoreMissing()); } @@ -135,7 +135,7 @@ public void testCountryBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.Country.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -153,7 +153,7 @@ public void testAsnBuildDefaults() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-ASN")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_ASN_PROPERTIES)); + assertThat(processor.getProperties(), sameInstance(Database.Asn.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -177,7 +177,7 @@ public void testBuildDbFile() throws Exception { assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country")); - assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES)); + 
assertThat(processor.getProperties(), sameInstance(Database.Country.defaultProperties())); assertFalse(processor.isIgnoreMissing()); } @@ -186,7 +186,7 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); - Set asnOnlyProperties = new HashSet<>(Property.ALL_ASN_PROPERTIES); + Set asnOnlyProperties = new HashSet<>(Database.Asn.properties()); asnOnlyProperties.remove(Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); config.put("properties", List.of(asnProperty)); @@ -206,7 +206,7 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-ASN.mmdb"); - Set cityOnlyProperties = new HashSet<>(Property.ALL_CITY_PROPERTIES); + Set cityOnlyProperties = new HashSet<>(Database.City.properties()); cityOnlyProperties.remove(Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", List.of(cityProperty)); @@ -251,7 +251,7 @@ public void testBuildFields() throws Exception { int counter = 0; int numFields = scaledRandomIntBetween(1, Property.values().length); - for (Property property : Property.ALL_CITY_PROPERTIES) { + for (Property property : Database.City.properties()) { properties.add(property); fieldNames.add(property.name().toLowerCase(Locale.ROOT)); if (++counter >= numFields) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 3114d24ee7571..b40845246deb3 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ 
-14,7 +14,7 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.geoip.GeoIpProcessor.Property; +import org.elasticsearch.ingest.geoip.Database.Property; import org.elasticsearch.test.ESTestCase; import java.io.IOException; From f9d96ae72d6de78a884e79ef1639a92460df43cb Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 13:44:21 -0700 Subject: [PATCH 53/69] AwaitsFix #106900 --- .../org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 4726424ada5f2..bf16456c7476e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -228,6 +228,7 @@ protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) } @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106900") public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { addDocument(iw, List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0.1\"]}")))); From 9077c5d420a0fb452e1e81c5e6b2777a06489067 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 14:04:56 -0700 Subject: [PATCH 54/69] AwaitsFix #106968 --- .../reservedstate/service/FileSettingsServiceTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 5968be34e985a..b309f10903d09 100644 --- 
a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -253,6 +253,7 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { deadThreadLatch.countDown(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public void testStopWorksIfProcessingDidntReturnYet() throws Exception { var spiedController = spy(controller); var service = new FileSettingsService(clusterService, spiedController, env); From 5cc4a56b1f57f6b8b3da4f43f8e088fe17cecd03 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 1 Apr 2024 14:09:52 -0700 Subject: [PATCH 55/69] AwaitsFix #106939 --- .../rest-api-spec/test/esql/80_text.yml | 52 ++++++++++--------- 1 file changed, 28 insertions(+), 24 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml index 329f835a42659..17470af049a45 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/80_text.yml @@ -54,8 +54,8 @@ setup: - match: { columns.4.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "IT Director", "IT Director", "Jenny", "foo bar"] } - - match: { values.1: [ 20, "Payroll Specialist", "Payroll Specialist", "John", "baz"] } + - match: { values.0: [ 10, "IT Director", "IT Director", "Jenny", "foo bar" ] } + - match: { values.1: [ 20, "Payroll Specialist", "Payroll Specialist", "John", "baz" ] } --- @@ -77,7 +77,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "like by text": @@ -98,7 +98,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } 
- - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "rlike by text": @@ -119,7 +119,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "IN on text": @@ -144,7 +144,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "IN on text and itself": @@ -169,8 +169,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } - - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz" ] } --- "NOT IN on text": @@ -195,7 +195,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } --- "eval and filter text": @@ -216,7 +216,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- "filter on text multi-field": @@ -237,7 +237,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } --- "like by multi-field text": @@ -258,7 +258,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll 
Specialist", "baz" ] } --- "rlike by multi-field text": @@ -279,7 +279,7 @@ setup: - match: { columns.3.type: "text" } - length: { values: 1 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } --- @@ -301,8 +301,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } - - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } + - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar" ] } --- @@ -324,8 +324,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar"] } - - match: { values.1: [ 20, "John", "Payroll Specialist", "baz"] } + - match: { values.0: [ 10, "Jenny", "IT Director", "foo bar" ] } + - match: { values.1: [ 20, "John", "Payroll Specialist", "baz" ] } --- "sort by text multi-field desc": @@ -346,8 +346,8 @@ setup: - match: { columns.3.type: "text" } - length: { values: 2 } - - match: { values.0: [ 20, "John", "Payroll Specialist", "baz"] } - - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar"] } + - match: { values.0: [ 20, "John", "Payroll Specialist", "baz" ] } + - match: { values.1: [ 10, "Jenny", "IT Director", "foo bar" ] } --- @@ -363,8 +363,8 @@ setup: - match: { columns.0.type: "keyword" } - length: { values: 2 } - - match: { values.0: [ "Jenny - IT Director"] } - - match: { values.1: [ "John - Payroll Specialist"] } + - match: { values.0: [ "Jenny - IT Director" ] } + - match: { values.1: [ "John - Payroll Specialist" ] } --- "split text": @@ -383,8 +383,8 @@ setup: - match: { columns.0.type: "keyword" } - length: { values: 2 } - - match: { values.0: [ ["foo", "bar"] ] } - - match: { values.1: [ "baz"] } + - match: { values.0: [ [ "foo", "bar" ] ] } + - match: { values.1: [ "baz" ] } --- @@ -521,7 +521,7 @@ 
setup: refresh: true body: - { "index": { } } - - { "emp_no": 10, "name": "Jenny", "job": "IT Director"} + - { "emp_no": 10, "name": "Jenny", "job": "IT Director" } - { "index": { } } - { "emp_no": 20, "name": "John", "job": "Payroll Specialist" } @@ -541,6 +541,10 @@ setup: --- values: + - skip: + version: all + reason: 'AwaitsFix https://github.com/elastic/elasticsearch/issues/106939' + - requires: cluster_features: esql.agg_values reason: "values is available in 8.14+" From 601176364e0b1bd9e141e5bc8e8a796d9ff99c63 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 1 Apr 2024 15:52:27 -0600 Subject: [PATCH 56/69] Call out `monitor` privilege for index and component templates (#106970) These can be retrieved using the `monitor` cluster privilege. This just adds that note to the docs. --- docs/reference/indices/get-component-template.asciidoc | 2 +- docs/reference/indices/get-index-template.asciidoc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/indices/get-component-template.asciidoc b/docs/reference/indices/get-component-template.asciidoc index f3073406be2b1..f35192ca448db 100644 --- a/docs/reference/indices/get-component-template.asciidoc +++ b/docs/reference/indices/get-component-template.asciidoc @@ -51,7 +51,7 @@ GET /_component_template/template_1 * If the {es} {security-features} are enabled, you must have the `manage_index_templates` or `manage` <> to use this API. +privilege>> to update templates, or the `monitor` cluster privilege to retrieve templates. 
[[get-component-template-api-path-params]] ==== {api-path-parms-title} diff --git a/docs/reference/indices/get-index-template.asciidoc b/docs/reference/indices/get-index-template.asciidoc index 9ae8af6f8441b..2cde5adc8ae23 100644 --- a/docs/reference/indices/get-index-template.asciidoc +++ b/docs/reference/indices/get-index-template.asciidoc @@ -46,7 +46,7 @@ GET /_index_template/template_1 * If the {es} {security-features} are enabled, you must have the `manage_index_templates` or `manage` <> to use this API. +privilege>> to use this API, or the `monitor` cluster privilege to retrieve templates. [[get-template-api-path-params]] ==== {api-path-parms-title} From 22ca5ecfd36e79cad967a129741400b7fecf2657 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 1 Apr 2024 16:19:59 -0600 Subject: [PATCH 57/69] Fix warning typo for test failure (#106971) This had the wrong index patterns, which led to test failures Resolves #106965 --- .../resources/rest-api-spec/test/data_stream/10_basic.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index 23f8715b5787f..6187c1a0dfed1 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -543,7 +543,7 @@ setup: - do: allowed_warnings: - - "index template [my-template4] has index patterns [failure-data-stream1, failure-data-stream2] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] will take precedence during new index creation" + - "index template [my-template4] has index patterns [failure-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template4] 
will take precedence during new index creation" indices.put_index_template: name: my-template4 body: From 393e644165181c114925ee179ca43fef02e99579 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Apr 2024 00:44:44 +0200 Subject: [PATCH 58/69] Refactor more ActionListener.wrap to delegateFailureAndWrap (#106948) Refactoring a couple more of these and inlining some listeners while at it to save code/memory and get cleaner guarantees around note leaking things. Note: In two cases inlining makes it obvious that we can remove spurious AtomicReference usage. --- .../execution/search/PITAwareQueryClient.java | 2 +- .../TransportGetTrainedModelsAction.java | 154 +++++++++--------- .../ml/action/TransportMlMemoryAction.java | 46 +++--- .../InferencePipelineAggregationBuilder.java | 8 +- .../ml/datafeed/DatafeedContextProvider.java | 36 ++-- .../persistence/DatafeedConfigProvider.java | 20 ++- .../dataframe/DataFrameAnalyticsManager.java | 26 +-- .../xpack/ml/dataframe/DestinationIndex.java | 81 +++++---- .../DataFrameAnalyticsConfigProvider.java | 2 +- .../deployment/DeploymentManager.java | 10 +- .../retention/ExpiredAnnotationsRemover.java | 12 +- .../retention/ExpiredForecastsRemover.java | 12 +- .../ExpiredModelSnapshotsRemover.java | 23 ++- .../job/retention/ExpiredResultsRemover.java | 12 +- .../xpack/ml/process/MlMemoryTracker.java | 8 +- .../ExpiredAnnotationsRemoverTests.java | 1 + .../ExpiredModelSnapshotsRemoverTests.java | 1 + .../retention/ExpiredResultsRemoverTests.java | 1 + 18 files changed, 213 insertions(+), 242 deletions(-) diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java index befb2c7503515..cce3cdeb97961 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java +++ 
b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java @@ -125,7 +125,7 @@ private ActionListener pitListener(Function {}, ex -> {})); + close(ActionListener.noop()); } } ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index 78d030d454f0b..b250df8d5215f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.Tuple; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; @@ -65,82 +64,6 @@ public TransportGetTrainedModelsAction( @Override protected void doExecute(Task task, Request request, ActionListener listener) { final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - - Response.Builder responseBuilder = Response.builder(); - - ActionListener> getModelDefinitionStatusListener = ActionListener.wrap(configs -> { - if (request.getIncludes().isIncludeDefinitionStatus() == false) { - listener.onResponse(responseBuilder.setModels(configs).build()); - return; - } - - assert configs.size() <= 1; - if (configs.isEmpty()) { - listener.onResponse(responseBuilder.setModels(configs).build()); - return; - } - - if (configs.get(0).getModelType() != TrainedModelType.PYTORCH) { - listener.onFailure(ExceptionsHelper.badRequestException("Definition status is only relevant to PyTorch model types")); - return; - } - - 
TransportStartTrainedModelDeploymentAction.checkFullModelDefinitionIsPresent( - new OriginSettingClient(client, ML_ORIGIN), - configs.get(0), - false, // missing docs are not an error - null, // if download is in progress, don't wait for it to complete - ActionListener.wrap(modelIdAndLength -> { - configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); - listener.onResponse(responseBuilder.setModels(configs).build()); - }, listener::onFailure) - ); - }, listener::onFailure); - - ActionListener>>> idExpansionListener = ActionListener.wrap(totalAndIds -> { - responseBuilder.setTotalCount(totalAndIds.v1()); - - if (totalAndIds.v2().isEmpty()) { - listener.onResponse(responseBuilder.build()); - return; - } - - if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) { - listener.onFailure(ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)); - return; - } - - if (request.getIncludes().isIncludeDefinitionStatus() && totalAndIds.v2().size() > 1) { - listener.onFailure( - ExceptionsHelper.badRequestException( - "Getting the model download status is not supported when getting more than one model" - ) - ); - return; - } - - if (request.getIncludes().isIncludeModelDefinition()) { - Map.Entry> modelIdAndAliases = totalAndIds.v2().entrySet().iterator().next(); - provider.getTrainedModel( - modelIdAndAliases.getKey(), - modelIdAndAliases.getValue(), - request.getIncludes(), - parentTaskId, - ActionListener.wrap( - config -> getModelDefinitionStatusListener.onResponse(Collections.singletonList(config)), - getModelDefinitionStatusListener::onFailure - ) - ); - } else { - provider.getTrainedModels( - totalAndIds.v2(), - request.getIncludes(), - request.isAllowNoResources(), - parentTaskId, - getModelDefinitionStatusListener - ); - } - }, listener::onFailure); provider.expandIds( request.getResourceId(), request.isAllowNoResources(), @@ -149,7 +72,82 @@ protected void doExecute(Task task, Request request, 
ActionListener li ModelAliasMetadata.fromState(clusterService.state()), parentTaskId, Collections.emptySet(), - idExpansionListener + listener.delegateFailureAndWrap((delegate, totalAndIds) -> { + Response.Builder responseBuilder = Response.builder(); + responseBuilder.setTotalCount(totalAndIds.v1()); + + if (totalAndIds.v2().isEmpty()) { + delegate.onResponse(responseBuilder.build()); + return; + } + + if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) { + delegate.onFailure(ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)); + return; + } + + if (request.getIncludes().isIncludeDefinitionStatus() && totalAndIds.v2().size() > 1) { + delegate.onFailure( + ExceptionsHelper.badRequestException( + "Getting the model download status is not supported when getting more than one model" + ) + ); + return; + } + + ActionListener> getModelDefinitionStatusListener = delegate.delegateFailureAndWrap( + (delegate2, configs) -> { + if (request.getIncludes().isIncludeDefinitionStatus() == false) { + delegate2.onResponse(responseBuilder.setModels(configs).build()); + return; + } + + assert configs.size() <= 1; + if (configs.isEmpty()) { + delegate2.onResponse(responseBuilder.setModels(configs).build()); + return; + } + + if (configs.get(0).getModelType() != TrainedModelType.PYTORCH) { + delegate2.onFailure( + ExceptionsHelper.badRequestException("Definition status is only relevant to PyTorch model types") + ); + return; + } + + TransportStartTrainedModelDeploymentAction.checkFullModelDefinitionIsPresent( + new OriginSettingClient(client, ML_ORIGIN), + configs.get(0), + false, // missing docs are not an error + null, // if download is in progress, don't wait for it to complete + delegate2.delegateFailureAndWrap((l, modelIdAndLength) -> { + configs.get(0).setFullDefinition(modelIdAndLength.v2() > 0); + l.onResponse(responseBuilder.setModels(configs).build()); + }) + ); + } + ); + if 
(request.getIncludes().isIncludeModelDefinition()) { + Map.Entry> modelIdAndAliases = totalAndIds.v2().entrySet().iterator().next(); + provider.getTrainedModel( + modelIdAndAliases.getKey(), + modelIdAndAliases.getValue(), + request.getIncludes(), + parentTaskId, + getModelDefinitionStatusListener.delegateFailureAndWrap( + (l, config) -> l.onResponse(Collections.singletonList(config)) + ) + ); + } else { + provider.getTrainedModels( + totalAndIds.v2(), + request.getIncludes(), + request.isAllowNoResources(), + parentTaskId, + getModelDefinitionStatusListener + ); + } + }) ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java index 0265faaeeb1d6..3223a7c7863f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java @@ -95,38 +95,34 @@ protected void masterOperation( ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); - ActionListener nodeStatsListener = ActionListener.wrap(nodesStatsResponse -> { - TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( - nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) - ).timeout(request.timeout()); - - parentTaskClient.execute( - TrainedModelCacheInfoAction.INSTANCE, - trainedModelCacheInfoRequest, - ActionListener.wrap( - trainedModelCacheInfoResponse -> handleResponses( - state, - clusterSettings, - nodesStatsResponse, - trainedModelCacheInfoResponse, - listener - ), - listener::onFailure - ) - ); - }, listener::onFailure); - // Next get node stats related to the OS and JVM - ActionListener memoryTrackerRefreshListener = ActionListener.wrap( - r -> parentTaskClient.admin() + 
ActionListener memoryTrackerRefreshListener = listener.delegateFailureAndWrap( + (delegate, r) -> parentTaskClient.admin() .cluster() .prepareNodesStats(nodeIds) .clear() .setOs(true) .setJvm(true) .setTimeout(request.timeout()) - .execute(nodeStatsListener), - listener::onFailure + .execute(delegate.delegateFailureAndWrap((delegate2, nodesStatsResponse) -> { + TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( + nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) + ).timeout(request.timeout()); + + parentTaskClient.execute( + TrainedModelCacheInfoAction.INSTANCE, + trainedModelCacheInfoRequest, + delegate2.delegateFailureAndWrap( + (l, trainedModelCacheInfoResponse) -> handleResponses( + state, + clusterSettings, + nodesStatsResponse, + trainedModelCacheInfoResponse, + l + ) + ) + ); + })) ); // If the memory tracker has never been refreshed, do that first diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java index cac9d88256696..16a0f85028b85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java @@ -289,17 +289,17 @@ public InferencePipelineAggregationBuilder rewrite(QueryRewriteContext context) privRequest.indexPrivileges(new RoleDescriptor.IndicesPrivileges[] {}); privRequest.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[] {}); - ActionListener privResponseListener = ActionListener.wrap(r -> { + ActionListener privResponseListener = listener.delegateFailureAndWrap((l, r) -> { if (r.isCompleteMatch()) { - modelLoadAction.accept(client, listener); + 
modelLoadAction.accept(client, l); } else { - listener.onFailure( + l.onFailure( Exceptions.authorizationError( "user [" + username + "] does not have the privilege to get trained models so cannot use ml inference" ) ); } - }, listener::onFailure); + }); client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java index defbc01a89d48..1e4db8aff4559 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java @@ -9,15 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; -import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; -import org.elasticsearch.xpack.ml.job.persistence.RestartTimeInfo; import java.util.Objects; -import java.util.function.Consumer; public class DatafeedContextProvider { @@ -38,27 +34,19 @@ public DatafeedContextProvider( public void buildDatafeedContext(String datafeedId, ActionListener listener) { DatafeedContext.Builder context = DatafeedContext.builder(); - Consumer timingStatsListener = timingStats -> { - context.setTimingStats(timingStats); - listener.onResponse(context.build()); - }; - - ActionListener restartTimeInfoListener = ActionListener.wrap(restartTimeInfo -> { - context.setRestartTimeInfo(restartTimeInfo); - resultsProvider.datafeedTimingStats(context.getJob().getId(), timingStatsListener, 
listener::onFailure); - }, listener::onFailure); - - ActionListener jobConfigListener = ActionListener.wrap(jobBuilder -> { - context.setJob(jobBuilder.build()); - resultsProvider.getRestartTimeInfo(jobBuilder.getId(), restartTimeInfoListener); - }, listener::onFailure); - - ActionListener datafeedListener = ActionListener.wrap(datafeedConfigBuilder -> { + datafeedConfigProvider.getDatafeedConfig(datafeedId, null, listener.delegateFailureAndWrap((delegate1, datafeedConfigBuilder) -> { DatafeedConfig datafeedConfig = datafeedConfigBuilder.build(); context.setDatafeedConfig(datafeedConfig); - jobConfigProvider.getJob(datafeedConfig.getJobId(), null, jobConfigListener); - }, listener::onFailure); - - datafeedConfigProvider.getDatafeedConfig(datafeedId, null, datafeedListener); + jobConfigProvider.getJob(datafeedConfig.getJobId(), null, delegate1.delegateFailureAndWrap((delegate2, jobBuilder) -> { + context.setJob(jobBuilder.build()); + resultsProvider.getRestartTimeInfo(jobBuilder.getId(), delegate2.delegateFailureAndWrap((delegate3, restartTimeInfo) -> { + context.setRestartTimeInfo(restartTimeInfo); + resultsProvider.datafeedTimingStats(context.getJob().getId(), timingStats -> { + context.setTimingStats(timingStats); + delegate3.onResponse(context.build()); + }, delegate3::onFailure); + })); + })); + })); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index fbabc9903c4cc..20da61a3d6910 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -351,14 +351,20 @@ public void onResponse(GetResponse getResponse) { return; } - ActionListener validatedListener = ActionListener.wrap( - ok -> 
indexUpdatedConfig(updatedConfig, seqNo, primaryTerm, ActionListener.wrap(indexResponse -> { - assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; - delegate.onResponse(updatedConfig); - }, delegate::onFailure)), - delegate::onFailure + validator.accept( + updatedConfig, + delegate.delegateFailureAndWrap( + (l, ok) -> indexUpdatedConfig( + updatedConfig, + seqNo, + primaryTerm, + l.delegateFailureAndWrap((ll, indexResponse) -> { + assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; + ll.onResponse(updatedConfig); + }) + ) + ) ); - validator.accept(updatedConfig, validatedListener); } } ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index d370e8af52549..203474a3c9d0a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -169,29 +169,21 @@ public void execute(DataFrameAnalyticsTask task, ClusterState clusterState, Time }, task::setFailed); - // Retrieve configuration - ActionListener statsIndexListener = configListener.delegateFailureAndWrap( - (l, aBoolean) -> configProvider.get(task.getParams().getId(), l) - ); - - // Make sure the stats index and alias exist - ActionListener stateAliasListener = ActionListener.wrap( - aBoolean -> createStatsIndexAndUpdateMappingsIfNecessary( - new ParentTaskAssigningClient(client, task.getParentTaskId()), - clusterState, - masterNodeTimeout, - statsIndexListener - ), - configListener::onFailure - ); - // Make sure the state index and alias exist AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessaryAndWaitForYellow( new ParentTaskAssigningClient(client, task.getParentTaskId()), clusterState, expressionResolver, masterNodeTimeout, - stateAliasListener + 
configListener.delegateFailureAndWrap( + (delegate, aBoolean) -> createStatsIndexAndUpdateMappingsIfNecessary( + new ParentTaskAssigningClient(client, task.getParentTaskId()), + clusterState, + masterNodeTimeout, + // Retrieve configuration + delegate.delegateFailureAndWrap((l, ignored) -> configProvider.get(task.getParams().getId(), l)) + ) + ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java index 280984feab4d4..ed12f54ab86b5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java @@ -46,7 +46,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -110,18 +109,22 @@ public static void createDestinationIndex( String[] destIndexAllowedSettings, ActionListener listener ) { - ActionListener createIndexRequestListener = ActionListener.wrap(createIndexRequest -> { - ClientHelper.executeWithHeadersAsync( - analyticsConfig.getHeaders(), - ClientHelper.ML_ORIGIN, - client, - TransportCreateIndexAction.TYPE, - createIndexRequest, - listener - ); - }, listener::onFailure); - - prepareCreateIndexRequest(client, clock, analyticsConfig, destIndexAllowedSettings, createIndexRequestListener); + prepareCreateIndexRequest( + client, + clock, + analyticsConfig, + destIndexAllowedSettings, + listener.delegateFailureAndWrap( + (l, createIndexRequest) -> ClientHelper.executeWithHeadersAsync( + analyticsConfig.getHeaders(), + ClientHelper.ML_ORIGIN, + client, + TransportCreateIndexAction.TYPE, + createIndexRequest, + l + ) + ) + ); } private static void prepareCreateIndexRequest( @@ -131,30 +134,6 @@ private static void 
prepareCreateIndexRequest( String[] destIndexAllowedSettings, ActionListener listener ) { - AtomicReference settingsHolder = new AtomicReference<>(); - AtomicReference mappingsHolder = new AtomicReference<>(); - - ActionListener fieldCapabilitiesListener = listener.delegateFailureAndWrap( - (l, fieldCapabilitiesResponse) -> l.onResponse( - createIndexRequest(clock, config, settingsHolder.get(), mappingsHolder.get(), fieldCapabilitiesResponse) - ) - ); - - ActionListener mappingsListener = ActionListener.wrap(mappings -> { - mappingsHolder.set(mappings); - getFieldCapsForRequiredFields(client, config, fieldCapabilitiesListener); - }, listener::onFailure); - - ActionListener settingsListener = ActionListener.wrap(settings -> { - settingsHolder.set(settings); - MappingsMerger.mergeMappings(client, config.getHeaders(), config.getSource(), mappingsListener); - }, listener::onFailure); - - ActionListener getSettingsResponseListener = ActionListener.wrap( - settingsResponse -> settingsListener.onResponse(settings(settingsResponse, destIndexAllowedSettings)), - listener::onFailure - ); - GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(config.getSource().getIndex()) .indicesOptions(IndicesOptions.lenientExpandOpen()) .names(PRESERVED_SETTINGS); @@ -164,7 +143,25 @@ private static void prepareCreateIndexRequest( client, GetSettingsAction.INSTANCE, getSettingsRequest, - getSettingsResponseListener + listener.delegateFailureAndWrap((delegate, settingsResponse) -> { + final Settings settings = settings(settingsResponse, destIndexAllowedSettings); + MappingsMerger.mergeMappings( + client, + config.getHeaders(), + config.getSource(), + delegate.delegateFailureAndWrap( + (l, mappings) -> getFieldCapsForRequiredFields( + client, + config, + l.delegateFailureAndWrap( + (ll, fieldCapabilitiesResponse) -> ll.onResponse( + createIndexRequest(clock, config, settings, mappings, fieldCapabilitiesResponse) + ) + ) + ) + ) + ); + }) ); } @@ -355,7 +352,7 @@ public 
static void updateMappingsToDestIndex( // Verify that the results field does not exist in the dest index checkResultsFieldIsNotPresentInProperties(config, destPropertiesAsMap); - ActionListener fieldCapabilitiesListener = ActionListener.wrap(fieldCapabilitiesResponse -> { + getFieldCapsForRequiredFields(client, config, listener.delegateFailureAndWrap((delegate, fieldCapabilitiesResponse) -> { Map addedMappings = new HashMap<>(); // Determine mappings to be added to the destination index @@ -374,11 +371,9 @@ public static void updateMappingsToDestIndex( client, TransportPutMappingAction.TYPE, putMappingRequest, - listener + delegate ); - }, listener::onFailure); - - getFieldCapsForRequiredFields(client, config, fieldCapabilitiesListener); + })); } private static void checkResultsFieldIsNotPresentInProperties(DataFrameAnalyticsConfig config, Map properties) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 8c7d490f37787..47071c80b90ee 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -142,7 +142,7 @@ private void exists(String jobId, ActionListener listener) { private void deleteLeftOverDocs(DataFrameAnalyticsConfig config, TimeValue timeout, ActionListener listener) { DataFrameAnalyticsDeleter deleter = new DataFrameAnalyticsDeleter(client, auditor); - deleter.deleteAllDocuments(config, timeout, ActionListener.wrap(r -> listener.onResponse(r), e -> { + deleter.deleteAllDocuments(config, timeout, ActionListener.wrap(listener::onResponse, e -> { if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { // This is expected 
listener.onResponse(AcknowledgedResponse.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 17b931d971188..9187969fc25a4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -424,7 +424,7 @@ public void clearCache(TrainedModelDeploymentTask task, TimeValue timeout, Actio timeout, processContext, threadPool, - ActionListener.wrap(b -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) + listener.delegateFailureAndWrap((l, b) -> l.onResponse(AcknowledgedResponse.TRUE)) ); executePyTorchAction(processContext, PriorityProcessWorkerExecutorService.RequestPriority.HIGHEST, controlMessageAction); @@ -533,18 +533,18 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene startTime = Instant.now(); logger.debug("[{}] process started", task.getDeploymentId()); try { - loadModel(modelLocation, ActionListener.wrap(success -> { + loadModel(modelLocation, loadedListener.delegateFailureAndWrap((delegate, success) -> { if (isStopped) { logger.debug("[{}] model loaded but process is stopped", task.getDeploymentId()); killProcessIfPresent(); - loadedListener.onFailure(new IllegalStateException("model loaded but process is stopped")); + delegate.onFailure(new IllegalStateException("model loaded but process is stopped")); return; } logger.debug("[{}] model loaded, starting priority process worker thread", task.getDeploymentId()); startPriorityProcessWorker(); - loadedListener.onResponse(success); - }, loadedListener::onFailure)); + delegate.onResponse(success); + })); } catch (Exception e) { loadedListener.onFailure(e); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java index 917d5881ae130..050d01198b910 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemover.java @@ -131,18 +131,18 @@ private static DeleteByQueryRequest createDBQRequest(Job job, float requestsPerS @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - latestBucketTime(client, getParentTaskId(), jobId, ActionListener.wrap(latestTime -> { + latestBucketTime(client, getParentTaskId(), jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } private void auditAnnotationsWereDeleted(String jobId, long cutoffEpochMs) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 677e71b304cb9..886c19a65a4d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java 
@@ -77,15 +77,9 @@ public void remove(float requestsPerSec, ActionListener listener, Boole LOGGER.debug("Removing forecasts that expire before [{}]", cutoffEpochMs); ActionListener forecastStatsHandler = ActionListener.wrap( searchResponse -> deleteForecasts(searchResponse, requestsPerSec, listener, isTimedOutSupplier), - e -> { - listener.onFailure( - new ElasticsearchStatusException( - "An error occurred while searching forecasts to delete", - RestStatus.TOO_MANY_REQUESTS, - e - ) - ); - } + e -> listener.onFailure( + new ElasticsearchStatusException("An error occurred while searching forecasts to delete", RestStatus.TOO_MANY_REQUESTS, e) + ) ); SearchSourceBuilder source = new SearchSourceBuilder(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java index 27bd3c926d944..cbd505c293c86 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemover.java @@ -100,19 +100,18 @@ Long getRetentionDays(Job job) { @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - - latestSnapshotTimeStamp(jobId, ActionListener.wrap(latestTime -> { + latestSnapshotTimeStamp(jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); 
threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } private void latestSnapshotTimeStamp(String jobId, ActionListener listener) { @@ -135,22 +134,22 @@ private void latestSnapshotTimeStamp(String jobId, ActionListener listener searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)); searchRequest.setParentTask(getParentTaskId()); - client.search(searchRequest, ActionListener.wrap(response -> { + client.search(searchRequest, listener.delegateFailureAndWrap((delegate, response) -> { SearchHit[] hits = response.getHits().getHits(); if (hits.length == 0) { // no snapshots found - listener.onResponse(null); + delegate.onResponse(null); } else { String timestamp = stringFieldValueOrNull(hits[0], ModelSnapshot.TIMESTAMP.getPreferredName()); if (timestamp == null) { LOGGER.warn("Model snapshot document [{}] has a null timestamp field", hits[0].getId()); - listener.onResponse(null); + delegate.onResponse(null); } else { long timestampMs = TimeUtils.parseToEpochMs(timestamp); - listener.onResponse(timestampMs); + delegate.onResponse(timestampMs); } } - }, listener::onFailure)); + })); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java index 35e16b9fa8b88..be0bb53d454fe 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemover.java @@ -165,18 +165,18 @@ private static DeleteByQueryRequest createDBQRequest(Job job, float requestsPerS @Override void calcCutoffEpochMs(String jobId, long retentionDays, ActionListener listener) { - ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( - 
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), - listener - ); - latestBucketTime(client, getParentTaskId(), jobId, ActionListener.wrap(latestTime -> { + latestBucketTime(client, getParentTaskId(), jobId, listener.delegateFailureAndWrap((l, latestTime) -> { + ThreadedActionListener threadedActionListener = new ThreadedActionListener<>( + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME), + l + ); if (latestTime == null) { threadedActionListener.onResponse(null); } else { long cutoff = latestTime - new TimeValue(retentionDays, TimeUnit.DAYS).getMillis(); threadedActionListener.onResponse(new CutoffDetails(latestTime, cutoff)); } - }, listener::onFailure)); + })); } static void latestBucketTime(OriginSettingClient client, TaskId parentTaskId, String jobId, ActionListener listener) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 3f502c4d95cc9..d1f1d0d506c85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -366,7 +366,7 @@ public void refreshAnomalyDetectorJobMemoryAndAllOthers(String jobId, ActionList refresh( clusterService.state().getMetadata().custom(PersistentTasksCustomMetadata.TYPE), Collections.singleton(jobId), - ActionListener.wrap(aVoid -> refreshAnomalyDetectorJobMemory(jobId, listener), listener::onFailure) + listener.delegateFailureAndWrap((l, aVoid) -> refreshAnomalyDetectorJobMemory(jobId, l)) ); } @@ -503,15 +503,15 @@ private void refreshAllDataFrameAnalyticsJobTasks( .map(task -> ((StartDataFrameAnalyticsAction.TaskParams) task.getParams()).getId()) .collect(Collectors.toSet()); - configProvider.getConfigsForJobsWithTasksLeniently(jobsWithTasks, ActionListener.wrap(analyticsConfigs -> { + 
configProvider.getConfigsForJobsWithTasksLeniently(jobsWithTasks, listener.delegateFailureAndWrap((delegate, analyticsConfigs) -> { for (DataFrameAnalyticsConfig analyticsConfig : analyticsConfigs) { memoryRequirementByDataFrameAnalyticsJob.put( analyticsConfig.getId(), analyticsConfig.getModelMemoryLimit().getBytes() + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes() ); } - listener.onResponse(null); - }, listener::onFailure)); + delegate.onResponse(null); + })); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java index 39f1ead7e24e0..59e0093abfba9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredAnnotationsRemoverTests.java @@ -144,6 +144,7 @@ public void testCalcCutoffEpochMs() { List jobs = Collections.singletonList(JobTests.buildJobBuilder(jobId).setResultsRetentionDays(1L).build()); ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredAnnotationsRemover(jobs.iterator()).calcCutoffEpochMs(jobId, 1L, cutoffListener); long dayInMills = 60 * 60 * 24 * 1000; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java index 5b3168a425029..98dc3bf3ea84b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredModelSnapshotsRemoverTests.java @@ -271,6 +271,7 @@ public void testCalcCutoffEpochMs() { long 
retentionDays = 3L; ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredModelSnapshotsRemover(Collections.emptyIterator()).calcCutoffEpochMs("job-1", retentionDays, cutoffListener); long dayInMills = 60 * 60 * 24 * 1000; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java index 4dbb4eda07b0a..9a768b7f635bd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/ExpiredResultsRemoverTests.java @@ -144,6 +144,7 @@ public void testCalcCutoffEpochMs() { List jobs = Collections.singletonList(JobTests.buildJobBuilder(jobId).setResultsRetentionDays(1L).build()); ActionListener cutoffListener = mock(ActionListener.class); + when(cutoffListener.delegateFailureAndWrap(any())).thenCallRealMethod(); createExpiredResultsRemover(jobs.iterator()).calcCutoffEpochMs(jobId, 1L, cutoffListener); long dayInMills = 60 * 60 * 24 * 1000; From 4ddb6406cdbe613ca44cea200675ef0ac2407186 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 2 Apr 2024 02:41:22 +0200 Subject: [PATCH 59/69] Remove o.e.painless.toxcontent.UserTreeToXContent (#106935) This was never used -> remove it and its test. 
--- .../toxcontent/UserTreeToXContent.java | 685 ------------------ .../painless/ToXContentTests.java | 124 ---- 2 files changed, 809 deletions(-) delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java delete mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java deleted file mode 100644 index 2756419e68e16..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/toxcontent/UserTreeToXContent.java +++ /dev/null @@ -1,685 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.painless.toxcontent; - -import org.elasticsearch.painless.Operation; -import org.elasticsearch.painless.node.AExpression; -import org.elasticsearch.painless.node.ANode; -import org.elasticsearch.painless.node.EAssignment; -import org.elasticsearch.painless.node.EBinary; -import org.elasticsearch.painless.node.EBooleanComp; -import org.elasticsearch.painless.node.EBooleanConstant; -import org.elasticsearch.painless.node.EBrace; -import org.elasticsearch.painless.node.ECall; -import org.elasticsearch.painless.node.ECallLocal; -import org.elasticsearch.painless.node.EComp; -import org.elasticsearch.painless.node.EConditional; -import org.elasticsearch.painless.node.EDecimal; -import org.elasticsearch.painless.node.EDot; -import org.elasticsearch.painless.node.EElvis; -import org.elasticsearch.painless.node.EExplicit; -import org.elasticsearch.painless.node.EFunctionRef; -import org.elasticsearch.painless.node.EInstanceof; -import org.elasticsearch.painless.node.ELambda; -import org.elasticsearch.painless.node.EListInit; -import org.elasticsearch.painless.node.EMapInit; -import org.elasticsearch.painless.node.ENewArray; -import org.elasticsearch.painless.node.ENewArrayFunctionRef; -import org.elasticsearch.painless.node.ENewObj; -import org.elasticsearch.painless.node.ENull; -import org.elasticsearch.painless.node.ENumeric; -import org.elasticsearch.painless.node.ERegex; -import org.elasticsearch.painless.node.EString; -import org.elasticsearch.painless.node.ESymbol; -import org.elasticsearch.painless.node.EUnary; -import org.elasticsearch.painless.node.SBlock; -import org.elasticsearch.painless.node.SBreak; -import org.elasticsearch.painless.node.SCatch; -import org.elasticsearch.painless.node.SClass; -import org.elasticsearch.painless.node.SContinue; -import org.elasticsearch.painless.node.SDeclBlock; -import org.elasticsearch.painless.node.SDeclaration; -import org.elasticsearch.painless.node.SDo; -import 
org.elasticsearch.painless.node.SEach; -import org.elasticsearch.painless.node.SExpression; -import org.elasticsearch.painless.node.SFor; -import org.elasticsearch.painless.node.SFunction; -import org.elasticsearch.painless.node.SIf; -import org.elasticsearch.painless.node.SIfElse; -import org.elasticsearch.painless.node.SReturn; -import org.elasticsearch.painless.node.SThrow; -import org.elasticsearch.painless.node.STry; -import org.elasticsearch.painless.node.SWhile; -import org.elasticsearch.painless.phase.UserTreeBaseVisitor; -import org.elasticsearch.painless.symbol.Decorator.Condition; -import org.elasticsearch.painless.symbol.Decorator.Decoration; -import org.elasticsearch.painless.symbol.ScriptScope; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -/** - * Serialize the user tree - */ -public class UserTreeToXContent extends UserTreeBaseVisitor { - public final XContentBuilderWrapper builder; - - public UserTreeToXContent(XContentBuilder builder) { - this.builder = new XContentBuilderWrapper(Objects.requireNonNull(builder)); - } - - public UserTreeToXContent() { - this.builder = new XContentBuilderWrapper(); - } - - static final class Fields { - static final String NODE = "node"; - static final String LOCATION = "location"; - static final String LEFT = "left"; - static final String RIGHT = "right"; - static final String BLOCK = "block"; - static final String CONDITION = "condition"; - static final String TYPE = "type"; - static final String SYMBOL = "symbol"; - static final String DECORATIONS = "decorations"; - static final String CONDITIONS = "conditions"; - } - - @Override - public void visitClass(SClass userClassNode, ScriptScope scope) { - start(userClassNode); - - builder.field("source", scope.getScriptSource()); - builder.startArray("functions"); - userClassNode.visitChildren(this, 
scope); - builder.endArray(); - - end(userClassNode, scope); - } - - @Override - public void visitFunction(SFunction userFunctionNode, ScriptScope scope) { - start(userFunctionNode); - - builder.field("name", userFunctionNode.getFunctionName()); - builder.field("returns", userFunctionNode.getReturnCanonicalTypeName()); - if (userFunctionNode.getParameterNames().isEmpty() == false) { - builder.field("parameters", userFunctionNode.getParameterNames()); - } - if (userFunctionNode.getCanonicalTypeNameParameters().isEmpty() == false) { - builder.field("parameterTypes", userFunctionNode.getCanonicalTypeNameParameters()); - } - builder.field("isInternal", userFunctionNode.isInternal()); - builder.field("isStatic", userFunctionNode.isStatic()); - builder.field("isSynthetic", userFunctionNode.isSynthetic()); - builder.field("isAutoReturnEnabled", userFunctionNode.isAutoReturnEnabled()); - - builder.startArray(Fields.BLOCK); - userFunctionNode.visitChildren(this, scope); - builder.endArray(); - - end(userFunctionNode, scope); - } - - @Override - public void visitBlock(SBlock userBlockNode, ScriptScope scope) { - start(userBlockNode); - - builder.startArray("statements"); - userBlockNode.visitChildren(this, scope); - builder.endArray(); - - end(userBlockNode, scope); - } - - @Override - public void visitIf(SIf userIfNode, ScriptScope scope) { - start(userIfNode); - - builder.startArray(Fields.CONDITION); - userIfNode.getConditionNode().visit(this, scope); - builder.endArray(); - - block("ifBlock", userIfNode.getIfBlockNode(), scope); - - end(userIfNode, scope); - } - - @Override - public void visitIfElse(SIfElse userIfElseNode, ScriptScope scope) { - start(userIfElseNode); - - builder.startArray(Fields.CONDITION); - userIfElseNode.getConditionNode().visit(this, scope); - builder.endArray(); - - block("ifBlock", userIfElseNode.getIfBlockNode(), scope); - block("elseBlock", userIfElseNode.getElseBlockNode(), scope); - - end(userIfElseNode, scope); - } - - @Override - public 
void visitWhile(SWhile userWhileNode, ScriptScope scope) { - start(userWhileNode); - loop(userWhileNode.getConditionNode(), userWhileNode.getBlockNode(), scope); - end(userWhileNode, scope); - } - - @Override - public void visitDo(SDo userDoNode, ScriptScope scope) { - start(userDoNode); - loop(userDoNode.getConditionNode(), userDoNode.getBlockNode(), scope); - end(userDoNode, scope); - } - - @Override - public void visitFor(SFor userForNode, ScriptScope scope) { - start(userForNode); - - ANode initializerNode = userForNode.getInitializerNode(); - builder.startArray("initializer"); - if (initializerNode != null) { - initializerNode.visit(this, scope); - } - builder.endArray(); - - builder.startArray("condition"); - AExpression conditionNode = userForNode.getConditionNode(); - if (conditionNode != null) { - conditionNode.visit(this, scope); - } - builder.endArray(); - - builder.startArray("afterthought"); - AExpression afterthoughtNode = userForNode.getAfterthoughtNode(); - if (afterthoughtNode != null) { - afterthoughtNode.visit(this, scope); - } - builder.endArray(); - - block(userForNode.getBlockNode(), scope); - - end(userForNode, scope); - } - - @Override - public void visitEach(SEach userEachNode, ScriptScope scope) { - start(userEachNode); - - builder.field(Fields.TYPE, userEachNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userEachNode.getSymbol()); - - builder.startArray("iterable"); - userEachNode.getIterableNode().visitChildren(this, scope); - builder.endArray(); - - block(userEachNode.getBlockNode(), scope); - - end(userEachNode, scope); - } - - @Override - public void visitDeclBlock(SDeclBlock userDeclBlockNode, ScriptScope scope) { - start(userDeclBlockNode); - - builder.startArray("declarations"); - userDeclBlockNode.visitChildren(this, scope); - builder.endArray(); - - end(userDeclBlockNode, scope); - } - - @Override - public void visitDeclaration(SDeclaration userDeclarationNode, ScriptScope scope) { - start(userDeclarationNode); - - 
builder.field(Fields.TYPE, userDeclarationNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userDeclarationNode.getSymbol()); - - builder.startArray("value"); - userDeclarationNode.visitChildren(this, scope); - builder.endArray(); - - end(userDeclarationNode, scope); - } - - @Override - public void visitReturn(SReturn userReturnNode, ScriptScope scope) { - start(userReturnNode); - - builder.startArray("value"); - userReturnNode.visitChildren(this, scope); - builder.endArray(); - - end(userReturnNode, scope); - } - - @Override - public void visitExpression(SExpression userExpressionNode, ScriptScope scope) { - start(userExpressionNode); - - builder.startArray("statement"); - userExpressionNode.visitChildren(this, scope); - builder.endArray(); - - end(userExpressionNode, scope); - } - - @Override - public void visitTry(STry userTryNode, ScriptScope scope) { - start(userTryNode); - - block(userTryNode.getBlockNode(), scope); - - builder.startArray("catch"); - for (SCatch catchNode : userTryNode.getCatchNodes()) { - catchNode.visit(this, scope); - } - builder.endArray(); - - end(userTryNode, scope); - } - - @Override - public void visitCatch(SCatch userCatchNode, ScriptScope scope) { - start(userCatchNode); - - builder.field("exception", userCatchNode.getBaseException()); - builder.field(Fields.TYPE, userCatchNode.getCanonicalTypeName()); - builder.field(Fields.SYMBOL, userCatchNode.getSymbol()); - - builder.startArray(Fields.BLOCK); - userCatchNode.visitChildren(this, scope); - builder.endArray(); - - end(userCatchNode, scope); - } - - @Override - public void visitThrow(SThrow userThrowNode, ScriptScope scope) { - start(userThrowNode); - - builder.startArray("expression"); - userThrowNode.visitChildren(this, scope); - builder.endArray(); - - end(userThrowNode, scope); - } - - @Override - public void visitContinue(SContinue userContinueNode, ScriptScope scope) { - start(userContinueNode); - end(userContinueNode, scope); - } - - @Override - public void 
visitBreak(SBreak userBreakNode, ScriptScope scope) { - start(userBreakNode); - end(userBreakNode, scope); - } - - @Override - public void visitAssignment(EAssignment userAssignmentNode, ScriptScope scope) { - start(userAssignmentNode); - // TODO(stu): why would operation be null? - builder.field("postIfRead", userAssignmentNode.postIfRead()); - binaryOperation(userAssignmentNode.getOperation(), userAssignmentNode.getLeftNode(), userAssignmentNode.getRightNode(), scope); - end(userAssignmentNode, scope); - } - - @Override - public void visitUnary(EUnary userUnaryNode, ScriptScope scope) { - start(userUnaryNode); - - operation(userUnaryNode.getOperation()); - - builder.startArray("child"); - userUnaryNode.visitChildren(this, scope); - builder.endArray(); - - end(userUnaryNode, scope); - } - - @Override - public void visitBinary(EBinary userBinaryNode, ScriptScope scope) { - start(userBinaryNode); - binaryOperation(userBinaryNode.getOperation(), userBinaryNode.getLeftNode(), userBinaryNode.getRightNode(), scope); - end(userBinaryNode, scope); - } - - @Override - public void visitBooleanComp(EBooleanComp userBooleanCompNode, ScriptScope scope) { - start(userBooleanCompNode); - binaryOperation(userBooleanCompNode.getOperation(), userBooleanCompNode.getLeftNode(), userBooleanCompNode.getRightNode(), scope); - end(userBooleanCompNode, scope); - } - - @Override - public void visitComp(EComp userCompNode, ScriptScope scope) { - start(userCompNode); - binaryOperation(userCompNode.getOperation(), userCompNode.getLeftNode(), userCompNode.getRightNode(), scope); - end(userCompNode, scope); - } - - @Override - public void visitExplicit(EExplicit userExplicitNode, ScriptScope scope) { - start(userExplicitNode); - - builder.field(Fields.TYPE, userExplicitNode.getCanonicalTypeName()); - builder.startArray("child"); - userExplicitNode.visitChildren(this, scope); - builder.endArray(); - - end(userExplicitNode, scope); - } - - @Override - public void visitInstanceof(EInstanceof 
userInstanceofNode, ScriptScope scope) { - start(userInstanceofNode); - - builder.field(Fields.TYPE, userInstanceofNode.getCanonicalTypeName()); - builder.startArray("child"); - userInstanceofNode.visitChildren(this, scope); - builder.endArray(); - - end(userInstanceofNode, scope); - } - - @Override - public void visitConditional(EConditional userConditionalNode, ScriptScope scope) { - start(userConditionalNode); - - builder.startArray("condition"); - userConditionalNode.getConditionNode().visit(this, scope); - builder.endArray(); - - builder.startArray("true"); - userConditionalNode.getTrueNode().visit(this, scope); - builder.endArray(); - - builder.startArray("false"); - userConditionalNode.getFalseNode().visit(this, scope); - builder.endArray(); - - end(userConditionalNode, scope); - } - - @Override - public void visitElvis(EElvis userElvisNode, ScriptScope scope) { - start(userElvisNode); - - builder.startArray(Fields.LEFT); - userElvisNode.getLeftNode().visit(this, scope); - builder.endArray(); - - builder.startArray(Fields.RIGHT); - userElvisNode.getRightNode().visit(this, scope); - builder.endArray(); - - end(userElvisNode, scope); - } - - @Override - public void visitListInit(EListInit userListInitNode, ScriptScope scope) { - start(userListInitNode); - builder.startArray("values"); - userListInitNode.visitChildren(this, scope); - builder.endArray(); - end(userListInitNode, scope); - } - - @Override - public void visitMapInit(EMapInit userMapInitNode, ScriptScope scope) { - start(userMapInitNode); - expressions("keys", userMapInitNode.getKeyNodes(), scope); - expressions("values", userMapInitNode.getValueNodes(), scope); - end(userMapInitNode, scope); - } - - @Override - public void visitNewArray(ENewArray userNewArrayNode, ScriptScope scope) { - start(userNewArrayNode); - builder.field(Fields.TYPE, userNewArrayNode.getCanonicalTypeName()); - builder.field("isInitializer", userNewArrayNode.isInitializer()); - expressions("values", 
userNewArrayNode.getValueNodes(), scope); - end(userNewArrayNode, scope); - } - - @Override - public void visitNewObj(ENewObj userNewObjNode, ScriptScope scope) { - start(userNewObjNode); - builder.field(Fields.TYPE, userNewObjNode.getCanonicalTypeName()); - arguments(userNewObjNode.getArgumentNodes(), scope); - end(userNewObjNode, scope); - } - - @Override - public void visitCallLocal(ECallLocal userCallLocalNode, ScriptScope scope) { - start(userCallLocalNode); - builder.field("methodName", userCallLocalNode.getMethodName()); - arguments(userCallLocalNode.getArgumentNodes(), scope); - end(userCallLocalNode, scope); - } - - @Override - public void visitBooleanConstant(EBooleanConstant userBooleanConstantNode, ScriptScope scope) { - start(userBooleanConstantNode); - builder.field("value", userBooleanConstantNode.getBool()); - end(userBooleanConstantNode, scope); - } - - @Override - public void visitNumeric(ENumeric userNumericNode, ScriptScope scope) { - start(userNumericNode); - builder.field("numeric", userNumericNode.getNumeric()); - builder.field("radix", userNumericNode.getRadix()); - end(userNumericNode, scope); - } - - @Override - public void visitDecimal(EDecimal userDecimalNode, ScriptScope scope) { - start(userDecimalNode); - builder.field("value", userDecimalNode.getDecimal()); - end(userDecimalNode, scope); - } - - @Override - public void visitString(EString userStringNode, ScriptScope scope) { - start(userStringNode); - builder.field("value", userStringNode.getString()); - end(userStringNode, scope); - } - - @Override - public void visitNull(ENull userNullNode, ScriptScope scope) { - start(userNullNode); - end(userNullNode, scope); - } - - @Override - public void visitRegex(ERegex userRegexNode, ScriptScope scope) { - start(userRegexNode); - builder.field("pattern", userRegexNode.getPattern()); - builder.field("flags", userRegexNode.getFlags()); - end(userRegexNode, scope); - } - - @Override - public void visitLambda(ELambda userLambdaNode, ScriptScope 
scope) { - start(userLambdaNode); - builder.field("types", userLambdaNode.getCanonicalTypeNameParameters()); - builder.field("parameters", userLambdaNode.getParameterNames()); - block(userLambdaNode.getBlockNode(), scope); - end(userLambdaNode, scope); - } - - @Override - public void visitFunctionRef(EFunctionRef userFunctionRefNode, ScriptScope scope) { - start(userFunctionRefNode); - builder.field(Fields.SYMBOL, userFunctionRefNode.getSymbol()); - builder.field("methodName", userFunctionRefNode.getMethodName()); - end(userFunctionRefNode, scope); - } - - @Override - public void visitNewArrayFunctionRef(ENewArrayFunctionRef userNewArrayFunctionRefNode, ScriptScope scope) { - start(userNewArrayFunctionRefNode); - builder.field(Fields.TYPE, userNewArrayFunctionRefNode.getCanonicalTypeName()); - end(userNewArrayFunctionRefNode, scope); - } - - @Override - public void visitSymbol(ESymbol userSymbolNode, ScriptScope scope) { - start(userSymbolNode); - builder.field(Fields.SYMBOL, userSymbolNode.getSymbol()); - end(userSymbolNode, scope); - } - - @Override - public void visitDot(EDot userDotNode, ScriptScope scope) { - start(userDotNode); - - builder.startArray("prefix"); - userDotNode.visitChildren(this, scope); - builder.endArray(); - - builder.field("index", userDotNode.getIndex()); - builder.field("nullSafe", userDotNode.isNullSafe()); - - end(userDotNode, scope); - } - - @Override - public void visitBrace(EBrace userBraceNode, ScriptScope scope) { - start(userBraceNode); - - builder.startArray("prefix"); - userBraceNode.getPrefixNode().visit(this, scope); - builder.endArray(); - - builder.startArray("index"); - userBraceNode.getIndexNode().visit(this, scope); - builder.endArray(); - - end(userBraceNode, scope); - } - - @Override - public void visitCall(ECall userCallNode, ScriptScope scope) { - start(userCallNode); - - builder.startArray("prefix"); - userCallNode.getPrefixNode().visitChildren(this, scope); - builder.endArray(); - - builder.field("isNullSafe", 
userCallNode.isNullSafe()); - builder.field("methodName", userCallNode.getMethodName()); - - arguments(userCallNode.getArgumentNodes(), scope); - - end(userCallNode, scope); - } - - private void start(ANode node) { - builder.startObject(); - builder.field(Fields.NODE, node.getClass().getSimpleName()); - builder.field(Fields.LOCATION, node.getLocation().getOffset()); - } - - private void end(ANode node, ScriptScope scope) { - decorations(node, scope); - builder.endObject(); - } - - private void block(String name, SBlock block, ScriptScope scope) { - builder.startArray(name); - if (block != null) { - block.visit(this, scope); - } - builder.endArray(); - } - - private void block(SBlock block, ScriptScope scope) { - block(Fields.BLOCK, block, scope); - } - - private void loop(AExpression condition, SBlock block, ScriptScope scope) { - builder.startArray(Fields.CONDITION); - condition.visit(this, scope); - builder.endArray(); - - block(block, scope); - } - - private void operation(Operation op) { - builder.startObject("operation"); - if (op != null) { - builder.field(Fields.SYMBOL, op.symbol); - builder.field("name", op.name); - } - builder.endObject(); - } - - private void binaryOperation(Operation op, AExpression left, AExpression right, ScriptScope scope) { - operation(op); - - builder.startArray(Fields.LEFT); - left.visit(this, scope); - builder.endArray(); - - builder.startArray(Fields.RIGHT); - right.visit(this, scope); - builder.endArray(); - } - - private void arguments(List arguments, ScriptScope scope) { - if (arguments.isEmpty() == false) { - expressions("arguments", arguments, scope); - } - } - - private void expressions(String name, List expressions, ScriptScope scope) { - if (expressions.isEmpty() == false) { - builder.startArray(name); - for (AExpression expression : expressions) { - expression.visit(this, scope); - } - builder.endArray(); - } - } - - private void decorations(ANode node, ScriptScope scope) { - Set> conditions = 
scope.getAllConditions(node.getIdentifier()); - if (conditions.isEmpty() == false) { - builder.field(Fields.CONDITIONS, conditions.stream().map(Class::getSimpleName).sorted().collect(Collectors.toList())); - } - - Map, Decoration> decorations = scope.getAllDecorations(node.getIdentifier()); - if (decorations.isEmpty() == false) { - builder.startArray(Fields.DECORATIONS); - - decorations.keySet() - .stream() - .sorted(Comparator.comparing(Class::getName)) - .forEachOrdered(dkey -> DecorationToXContent.ToXContent(decorations.get(dkey), builder)); - builder.endArray(); - } - } - - @Override - public String toString() { - return builder.toString(); - } -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java deleted file mode 100644 index 2ac349ba697cf..0000000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ToXContentTests.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.painless; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.painless.phase.UserTreeVisitor; -import org.elasticsearch.painless.symbol.ScriptScope; -import org.elasticsearch.painless.toxcontent.UserTreeToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class ToXContentTests extends ScriptTestCase { - public void testUserFunction() { - Map func = getFunction("def twofive(int i) { return 25 + i; } int j = 23; twofive(j)", "twofive"); - assertFalse((Boolean) func.get("isInternal")); - assertFalse((Boolean) func.get("isStatic")); - assertEquals("SFunction", func.get("node")); - assertEquals("def", func.get("returns")); - assertEquals(List.of("int"), func.get("parameterTypes")); - assertEquals(List.of("i"), func.get("parameters")); - } - - public void testBlock() { - Map execute = getExecute("int i = 5; return i;"); - Map block = getNode(execute, "block", "SBlock"); - for (Object obj : (List) block.get("statements")) { - Map statement = (Map) obj; - } - Map decl = getStatement(block, "SDeclBlock"); - List decls = (List) decl.get("declarations"); - assertEquals(1, decls.size()); - assertEquals("i", ((Map) decls.get(0)).get("symbol")); - assertEquals("int", ((Map) decls.get(0)).get("type")); - - Map ret = getStatement(block, "SReturn"); - Map symbol = (Map) ((List) ret.get("value")).get(0); - assertEquals("ESymbol", symbol.get("node")); - assertEquals("i", symbol.get("symbol")); - } - - public void testFor() { - Map execute = getExecute("int q = 0; for (int j = 0; j < 100; j++) { q += j; } return q"); - Map sfor = getStatement(getNode(execute, "block", "SBlock"), "SFor"); - - Map ecomp = getNode(sfor, "condition", "EComp"); - assertEquals("j", getNode(ecomp, "left", 
"ESymbol").get("symbol")); - assertEquals("100", getNode(ecomp, "right", "ENumeric").get("numeric")); - assertEquals("less than", ((Map) ecomp.get("operation")).get("name")); - - Map init = getNode(sfor, "initializer", "SDeclBlock"); - Map decl = getNode(init, "declarations", "SDeclaration"); - assertEquals("j", decl.get("symbol")); - assertEquals("int", decl.get("type")); - assertEquals("0", getNode(decl, "value", "ENumeric").get("numeric")); - - Map after = getNode(sfor, "afterthought", "EAssignment"); - assertEquals("j", getNode(after, "left", "ESymbol").get("symbol")); - assertEquals("1", getNode(after, "right", "ENumeric").get("numeric")); - assertTrue((Boolean) after.get("postIfRead")); - } - - private Map getStatement(Map block, String node) { - return getNode(block, "statements", node); - } - - private Map getNode(Map map, String key, String node) { - for (Object obj : (List) map.get(key)) { - Map nodeMap = (Map) obj; - if (node.equals(nodeMap.get("node"))) { - return nodeMap; - } - } - fail("Missing node [" + node + "]"); - return Collections.emptyMap(); - } - - private Map getExecute(String script) { - return getFunction(script, "execute"); - } - - private Map getFunction(String script, String function) { - return getFunction(semanticPhase(script), function); - } - - private Map getFunction(XContentBuilder builder, String function) { - Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); - for (Object funcObj : ((List) map.get("functions"))) { - if (funcObj instanceof Map) { - if (function.equals(((Map) funcObj).get("name"))) { - return (Map) funcObj; - } - } - } - fail("Function [" + function + "] not found"); - return Collections.emptyMap(); - } - - private XContentBuilder semanticPhase(String script) { - XContentBuilder builder; - try { - builder = XContentFactory.jsonBuilder(); - } catch (IOException err) { - fail("script [" + script + "] threw IOException [" + err.getMessage() + "]"); - return null; 
- } - UserTreeVisitor semantic = new UserTreeToXContent(builder); - Debugger.phases(script, semantic, null, null); - Map map = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); - assertEquals(script, map.get("source")); - return builder; - } -} From df0fd30e7acc587874f4531cb68e4dfc45dc01e8 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 2 Apr 2024 09:35:02 +0300 Subject: [PATCH 60/69] [Doc] Privileges required to retrieve the status of async searches Document that users can retrieve the status of the async searches they submitted without any extra privileges. --- docs/reference/search/async-search.asciidoc | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/docs/reference/search/async-search.asciidoc b/docs/reference/search/async-search.asciidoc index ec8a95ec4006b..786cfaee8024c 100644 --- a/docs/reference/search/async-search.asciidoc +++ b/docs/reference/search/async-search.asciidoc @@ -143,8 +143,10 @@ allowed size for a stored async search response can be set by changing the ==== Get async search The get async search API retrieves the results of a previously submitted async -search request given its id. If the {es} {security-features} are enabled, the -access to the results of a specific async search is restricted to +search request given its id. + +If the {es} {security-features} are enabled, the access to the results of a +specific async search is restricted to only <>. [source,console,id=get-async-search-date-histogram-example] @@ -235,9 +237,13 @@ its saved results are deleted. ==== Get async search status The get async search status API, without retrieving search results, shows only -the status of a previously submitted async search request given its `id`. If the -{es} {security-features} are enabled, the access to the get async search status -API is restricted to the <>. +the status of a previously submitted async search request given its `id`. 
+ +If the {es} {security-features} are enabled, the access to the status of a +specific async search is restricted to: + +* The <> the original async search request. +* Users that have the `monitor` cluster privilege or higher. You can also specify how long the async search needs to be available through the `keep_alive` parameter, which defaults to `5d` (five days). Ongoing async @@ -333,5 +339,5 @@ DELETE /_async_search/FmRldE8zREVEUzA2ZVpUeGs2ejJFUFEaMkZ5QTVrSTZSaVN3WlNFVmtlWH If the {es} {security-features} are enabled, the deletion of a specific async search is restricted to: - * The authenticated user that submitted the original search request. - * Users that have the `cancel_task` cluster privilege. + * The <> the original async search request. + * Users that have the `cancel_task` cluster privilege or higher. From 89cfb85c82927a1ba91bcf8874e312ed25eb0c54 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 2 Apr 2024 09:36:50 +0300 Subject: [PATCH 61/69] [Test] Fix AsyncSearchSecurityIT testStatusWithUsersWhileSearchIsRunning (#106912) The error_query is only available in snapshot builds. All test failures have the release-tests tag. 
Closes #106871 --- .../org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 1f4830d8b6d0c..f628566587611 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.search; import org.apache.http.util.EntityUtils; +import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; @@ -178,6 +179,7 @@ public void testWithUsers() throws Exception { */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106871") public void testStatusWithUsersWhileSearchIsRunning() throws IOException { + assumeTrue("[error_query] is only available in snapshot builds", Build.current().isSnapshot()); String user = randomFrom("user1", "user2"); String other = user.equals("user1") ? "user2" : "user1"; String indexName = "index-" + user; From 0eca03cae1ea9bc696a6bd3f0c418ae2634645da Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 07:52:25 +0100 Subject: [PATCH 62/69] Remove unused `SnapshotsInRepo#remaining` (#106674) We only discard snapshots using the `?size` parameter when constructing the final response, so we can count the `remaining` snapshots in a local variable rather than tracking an unnecessary `0` on every `SnapshotsInRepo`. Indeed by inlining `sortSnapshots` into the routine that constructs the final response we can avoid even having to build the a final `SnapshotsInRepo` here. 
--- .../get/TransportGetSnapshotsAction.java | 56 +++++++------------ 1 file changed, 21 insertions(+), 35 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java index 6d29c36bdcda1..a66b318b16258 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/TransportGetSnapshotsAction.java @@ -67,7 +67,6 @@ import java.util.function.BooleanSupplier; import java.util.function.Predicate; import java.util.function.ToLongFunction; -import java.util.stream.Stream; /** * Transport Action for get snapshots operation @@ -181,7 +180,6 @@ private class GetSnapshotsOperation { // results private final Map failuresByRepository = ConcurrentCollections.newConcurrentMap(); private final Queue> allSnapshotInfos = ConcurrentCollections.newQueue(); - private final AtomicInteger remaining = new AtomicInteger(); private final AtomicInteger totalCount = new AtomicInteger(); GetSnapshotsOperation( @@ -256,7 +254,6 @@ void getMultipleReposSnapshotInfo(ActionListener listener) @Override public void onResponse(SnapshotsInRepo snapshotsInRepo) { allSnapshotInfos.add(snapshotsInRepo.snapshotInfos()); - remaining.addAndGet(snapshotsInRepo.remaining()); totalCount.addAndGet(snapshotsInRepo.totalCount()); delegate.onResponse(null); } @@ -275,26 +272,7 @@ public void onFailure(Exception e) { } }) - .addListener(listener.map(ignored -> { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - cancellableTask.ensureNotCancelled(); - final var sortedSnapshotsInRepos = sortSnapshots( - allSnapshotInfos.stream().flatMap(Collection::stream), - totalCount.get(), - offset, - size - ); - final var snapshotInfos = sortedSnapshotsInRepos.snapshotInfos(); - assert 
indices || snapshotInfos.stream().allMatch(snapshotInfo -> snapshotInfo.indices().isEmpty()); - final int finalRemaining = sortedSnapshotsInRepos.remaining() + remaining.get(); - return new GetSnapshotsResponse( - snapshotInfos, - failuresByRepository, - finalRemaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, - totalCount.get(), - finalRemaining - ); - })); + .addListener(listener.map(ignored -> buildResponse())); } private boolean skipRepository(String repositoryName) { @@ -486,30 +464,40 @@ private SnapshotsInRepo buildSimpleSnapshotInfos( } private SnapshotsInRepo applyAfterPredicate(List snapshotInfos) { - return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size(), 0); + return new SnapshotsInRepo(snapshotInfos.stream().filter(afterPredicate).toList(), snapshotInfos.size()); } - private SnapshotsInRepo sortSnapshots(Stream snapshotInfoStream, int totalCount, int offset, int size) { + private GetSnapshotsResponse buildResponse() { assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); - final var resultsStream = snapshotInfoStream.peek(this::assertSatisfiesAllPredicates) + cancellableTask.ensureNotCancelled(); + int remaining = 0; + final var resultsStream = allSnapshotInfos.stream() + .flatMap(Collection::stream) + .peek(this::assertSatisfiesAllPredicates) .sorted(sortBy.getSnapshotInfoComparator(order)) .skip(offset); + final List snapshotInfos; if (size == GetSnapshotsRequest.NO_LIMIT) { - return new SnapshotsInRepo(resultsStream.toList(), totalCount, 0); + snapshotInfos = resultsStream.toList(); } else { final var allocateSize = Math.min(size, 1000); // ignore excessively-large sizes in request params - final var results = new ArrayList(allocateSize); - var remaining = 0; + snapshotInfos = new ArrayList<>(allocateSize); for (var iterator = resultsStream.iterator(); iterator.hasNext();) { final var snapshotInfo = iterator.next(); - if (results.size() < 
size) { - results.add(snapshotInfo); + if (snapshotInfos.size() < size) { + snapshotInfos.add(snapshotInfo); } else { remaining += 1; } } - return new SnapshotsInRepo(results, totalCount, remaining); } + return new GetSnapshotsResponse( + snapshotInfos, + failuresByRepository, + remaining > 0 ? sortBy.encodeAfterQueryParam(snapshotInfos.get(snapshotInfos.size() - 1)) : null, + totalCount.get(), + remaining + ); } private void assertSatisfiesAllPredicates(SnapshotInfo snapshotInfo) { @@ -684,9 +672,7 @@ private static int indexCount(SnapshotId snapshotId, RepositoryData repositoryDa } } - private record SnapshotsInRepo(List snapshotInfos, int totalCount, int remaining) { - private static final SnapshotsInRepo EMPTY = new SnapshotsInRepo(List.of(), 0, 0); - } + private record SnapshotsInRepo(List snapshotInfos, int totalCount) {} /** * Throttling executor for retrieving {@link SnapshotInfo} instances from the repository without spamming the SNAPSHOT_META threadpool From 519580e62b3a71cf2e9b6bd8ab14b92c6c5076b2 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 08:42:07 +0100 Subject: [PATCH 63/69] AwaitsFix for #106957 --- .../org/elasticsearch/indices/SystemIndexThreadPoolTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java index b97c39ce70792..7db5d10c5fcfa 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTests.java @@ -67,6 +67,7 @@ protected void runWithBlockedThreadPools(Runnable runnable) { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106957") public void testUserThreadPoolsAreBlocked() { assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); From 7ee63dfd512736ad5bbfe2cfb3cbe2fda605b64d Mon 
Sep 17 00:00:00 2001 From: David Turner Date: Tue, 2 Apr 2024 09:18:59 +0100 Subject: [PATCH 64/69] Expand docs on reset desired balance API (#106921) Explains its purpose and surrounding context a little more, including a note that this should never be necessary (i.e. if you find you need it, that's a bug). --- docs/reference/cluster/delete-desired-balance.asciidoc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/docs/reference/cluster/delete-desired-balance.asciidoc b/docs/reference/cluster/delete-desired-balance.asciidoc index 7b89fed881e41..f81dcab011da4 100644 --- a/docs/reference/cluster/delete-desired-balance.asciidoc +++ b/docs/reference/cluster/delete-desired-balance.asciidoc @@ -6,9 +6,13 @@ NOTE: {cloud-only} -Resets the desired balance and starts a new computation from the current allocation. -This API may be used if desired balance computation diverged from the current state -and is trying to move too many shards. +Discards the current desired balance and computes a new desired balance starting from the current allocation of shards. +This can sometimes help {es} find a desired balance which needs fewer shard movements to achieve, especially if the +cluster has experienced changes so substantial that the current desired balance is no longer optimal without {es} having +detected that the current desired balance will take more shard movements to achieve than needed. However, this API +imposes a heavy load on the elected master node and may not always have the expected effect. Calling this API should +never be necessary. Consider instead <> to avoid excessive shard movements. [[delete-desired-balance-request]] ==== {api-request-title} From 20fc2d2d5e00043e0d830de5e700a8bce7098b3e Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Tue, 2 Apr 2024 19:27:33 +1100 Subject: [PATCH 65/69] Test modifying and removing data in ProfileIT (#106582) User Profiles can be used to store application data against a user (e.g. user preferences). 
This commit extends the integration tests for profile date storage to include explict tests for partial updates and clearing existing data --- .../xpack/security/profile/ProfileIT.java | 139 +++++++++++++++++- 1 file changed, 132 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java index e87d548bc23f2..b11c8fd39fe2c 100644 --- a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java +++ b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -24,6 +25,9 @@ import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.junit.ClassRule; import java.io.IOException; @@ -253,11 +257,11 @@ public void testGetProfiles() throws IOException { errorDetails4.values().forEach(value -> assertThat(castToMap(value).get("type"), equalTo("resource_not_found_exception"))); } - public void testUpdateProfileData() throws IOException { + public void testStoreProfileData() throws IOException { final Map activateProfileMap = doActivateProfile(); final String uid = (String) activateProfileMap.get("uid"); - 
final Request updateProfileRequest1 = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); - updateProfileRequest1.setJsonEntity(""" + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + updateProfileRequest.setJsonEntity(""" { "labels": { "app1": { "tags": [ "prod", "east" ] } @@ -266,11 +270,125 @@ public void testUpdateProfileData() throws IOException { "app1": { "theme": "default" } } }"""); - assertOK(adminClient().performRequest(updateProfileRequest1)); + assertOK(adminClient().performRequest(updateProfileRequest)); - final Map profileMap1 = doGetProfile(uid, "app1"); - assertThat(castToMap(profileMap1.get("labels")), equalTo(Map.of("app1", Map.of("tags", List.of("prod", "east"))))); - assertThat(castToMap(profileMap1.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); + final Map profileMap = doGetProfile(uid, "app1"); + assertThat(castToMap(profileMap.get("labels")), equalTo(Map.of("app1", Map.of("tags", List.of("prod", "east"))))); + assertThat(castToMap(profileMap.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); + } + + public void testModifyProfileData() throws IOException { + final Map activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + final String endpoint = "_security/profile/" + uid + "/_data"; + final String appName1 = randomAlphaOfLengthBetween(3, 5); + final String appName2 = randomAlphaOfLengthBetween(6, 8); + final List tags = randomList(1, 5, () -> randomAlphaOfLengthBetween(4, 12)); + final String labelKey = randomAlphaOfLengthBetween(4, 6); + final String dataKey1 = randomAlphaOfLengthBetween(3, 5); + final String dataKey2 = randomAlphaOfLengthBetween(6, 8); + final String dataKey3 = randomAlphaOfLengthBetween(9, 10); + final String dataValue1a = randomAlphaOfLengthBetween(6, 9); + final String dataValue1b = randomAlphaOfLengthBetween(10, 12); + final String 
dataValue2 = randomAlphaOfLengthBetween(6, 12); + final String dataValue3 = randomAlphaOfLengthBetween(4, 10); + + // Store the data + { + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), endpoint); + final Map dataBlock = Map.ofEntries( + // { k1: v1, k2: v2 } + Map.entry(dataKey1, dataValue1a), + Map.entry(dataKey2, dataValue2) + ); + updateProfileRequest.setJsonEntity( + toJson( + Map.ofEntries( + Map.entry("labels", Map.of(appName1, Map.of(labelKey, tags))), + // Store the same data under both app-names + Map.entry("data", Map.of(appName1, dataBlock, appName2, dataBlock)) + ) + ) + ); + assertOK(adminClient().performRequest(updateProfileRequest)); + + final Map profileMap1 = doGetProfile(uid, appName1); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName1, profileMap1); + assertThat(ObjectPath.eval("labels." + appName1 + "." + labelKey, profileMap1), equalTo(tags)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey1, profileMap1), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey2, profileMap1), equalTo(dataValue2)); + final Map profileMap2 = doGetProfile(uid, appName2); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName2, profileMap2); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey1, profileMap2), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName2 + "." 
+ dataKey2, profileMap2), equalTo(dataValue2)); + } + + // Store modified data + { + // Add a new tag, remove an old one + final String newTag = randomValueOtherThanMany(tags::contains, () -> randomAlphaOfLengthBetween(3, 9)); + tags.remove(randomFrom(tags)); + tags.add(newTag); + final Request updateProfileRequest = new Request(randomFrom("PUT", "POST"), endpoint); + final Map dataBlock = Map.ofEntries( + // { k1: v1b, k3: v3 } + Map.entry(dataKey1, dataValue1b), + Map.entry(dataKey3, dataValue3) + ); + updateProfileRequest.setJsonEntity( + toJson( + Map.ofEntries( + Map.entry("labels", Map.of(appName1, Map.of(labelKey, tags))), + // We don't make any changes to appName2, so it should keep the original data + Map.entry("data", Map.of(appName1, dataBlock)) + ) + ) + ); + assertOK(adminClient().performRequest(updateProfileRequest)); + + final Map profileMap1 = doGetProfile(uid, appName1); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName1, profileMap1); + assertThat(ObjectPath.eval("labels." + appName1 + "." + labelKey, profileMap1), equalTo(tags)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey1, profileMap1), equalTo(dataValue1b)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey2, profileMap1), equalTo(dataValue2)); + assertThat(ObjectPath.eval("data." + appName1 + "." + dataKey3, profileMap1), equalTo(dataValue3)); + final Map profileMap2 = doGetProfile(uid, appName2); + logger.info("Profile Map [{}][app={}] : {}", getTestName(), appName2, profileMap2); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey1, profileMap2), equalTo(dataValue1a)); + assertThat(ObjectPath.eval("data." + appName2 + "." + dataKey2, profileMap2), equalTo(dataValue2)); + assertThat(ObjectPath.eval("data." + appName2 + "." 
+ dataKey3, profileMap2), nullValue()); + } + } + + public void testRemoveProfileData() throws IOException { + final Map activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + { + final Request request = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + request.setJsonEntity(""" + { + "data": { + "app1": { "top": { "inner" : { "leaf": "data_value" } } } + } + }"""); + assertOK(adminClient().performRequest(request)); + + final Map profileMap = doGetProfile(uid, "app1"); + assertThat(ObjectPath.eval("data.app1.top.inner.leaf", profileMap), equalTo("data_value")); + } + { + final Request request = new Request(randomFrom("PUT", "POST"), "_security/profile/" + uid + "/_data"); + request.setJsonEntity(""" + { + "data": { + "app1": { "top": null } + } + }"""); + assertOK(adminClient().performRequest(request)); + + final Map profileMap = doGetProfile(uid, "app1"); + assertThat(ObjectPath.eval("data.app1.top", profileMap), nullValue()); + } } public void testSuggestProfile() throws IOException { @@ -559,4 +677,11 @@ private void doSetEnabled(String uid, boolean enabled) throws IOException { private Map castToMap(Object o) { return (Map) o; } + + private static String toJson(Map map) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder().map(map); + final BytesReference bytes = BytesReference.bytes(builder); + return bytes.utf8ToString(); + } + } From 2380492fac306fe104115ea6dabc57fc7fb02cae Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 2 Apr 2024 10:31:00 +0200 Subject: [PATCH 66/69] ESQL: Support ST_CONTAINS and ST_WITHIN (#106503) * WIP Started adding ST_CONTAINS * Add generated evaluators * Reduced warnings and use correct evaluators * Refactored tests to remove duplicate code, and fixed Contains/multi-components * Gradle build disallows using getDeclaredField * Fixed cases where rectangles cross the dateline * Fixed meta function tests * Added 
ST_WITHIN to support inverting ST_CONTAINS If the ST_CONTAINS is called with the constant on the left, we either have to create a lot more Evaluators to cover that case, or we have to invert it to ST_WITHIN. This inversion was a much easier option. * Simplify inversion logic * Add comment on choice of surrogate approach * Add unit tests and missing fold() function * Simple code cleanup * Add integration tests for literals * Add more integration tests based on actual data * Generated documentation files * Add documentation * Fixed failing function count test * Add tests that push-to-source works for ST_CONTAINS and ST_WITHIN * Test more combinations of WITH/CONTAINS and literal on right and left This also verifies that the re-writing of CONTAINS to WITHIN or vice versa occurs when the literal is on the left. * test that physical planning also handles doc-values from STATS * Added more tests for WITHIN/CONTAINS together with CENTROID This should test the doc-values for points. * Add cartesian_point tests * Add cartesian_shape tests * Disable Lucene-push-down for CARTESIAN data This is a limitation in Lucene, which we could address as a performance optimization in a future PR, but since it probably requires Lucene changes, it cannot be done in this work. 
* Fix doc links * Added test data and tests for cartesian multi-polygons Testing INTERSECTS, CONTAINS and WITHIN with multi-polygon fields * Use required features for spatial points, shapes and centroid * 8.13.0 is not yet historical version This needs to be reverted as soon as 8.13.0 is released * Added st_intersects and st_contains_within 'features' * Code review updates * Re-enable lucene push-down * Added more required_features * Fix point contains non-point * Fix point contains point * Re-enable lucene push-down in tests too Forgot to change the physical planner unit tests after re-enabling lucene push-down * Generate automatic docs * Use generated examples docs * Generated examples use '-result' prefix (singular) * Mark spatial functions as preview/experimental --- docs/changelog/106503.yaml | 5 + .../functions/aggregation-functions.asciidoc | 2 +- .../description/st_contains.asciidoc | 7 + .../description/st_intersects.asciidoc | 2 + .../functions/description/st_within.asciidoc | 7 + .../functions/examples/st_contains.asciidoc | 13 + .../functions/examples/st_intersects.asciidoc | 13 + .../functions/examples/st_within.asciidoc | 13 + .../functions/layout/st_contains.asciidoc | 15 + .../functions/layout/st_intersects.asciidoc | 1 + .../esql/functions/layout/st_within.asciidoc | 15 + .../functions/parameters/st_contains.asciidoc | 7 + .../functions/parameters/st_within.asciidoc | 7 + .../esql/functions/signature/st_contains.svg | 1 + .../esql/functions/signature/st_within.svg | 1 + .../esql/functions/spatial-functions.asciidoc | 10 +- .../esql/functions/st_centroid.asciidoc | 2 + .../esql/functions/st_contains.asciidoc | 26 + .../esql/functions/st_intersects.asciidoc | 16 +- .../esql/functions/st_within.asciidoc | 26 + docs/reference/esql/functions/st_x.asciidoc | 2 + docs/reference/esql/functions/st_y.asciidoc | 2 + .../esql/functions/types/st_contains.asciidoc | 16 + .../esql/functions/types/st_within.asciidoc | 16 + .../xpack/esql/CsvTestsDataLoader.java |
8 +- .../resources/cartesian_multipolygons.csv | 11 + .../cartesian_multipolygons.csv-spec | 201 ++++++ .../mapping-cartesian_multipolygons.json | 13 + .../src/main/resources/meta.csv-spec | 6 +- .../src/main/resources/spatial.csv-spec | 616 +++++++++++++++--- .../main/resources/spatial_shapes.csv-spec | 198 +++++- ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 ++++ ...nsCartesianSourceAndConstantEvaluator.java | 132 ++++ ...ainsCartesianSourceAndSourceEvaluator.java | 152 +++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++ ...nsGeoPointDocValuesAndSourceEvaluator.java | 151 +++++ ...ContainsGeoSourceAndConstantEvaluator.java | 132 ++++ ...alContainsGeoSourceAndSourceEvaluator.java | 152 +++++ ...ianPointDocValuesAndConstantEvaluator.java | 128 ++++ ...esianPointDocValuesAndSourceEvaluator.java | 142 ++++ ...inCartesianSourceAndConstantEvaluator.java | 132 ++++ ...thinCartesianSourceAndSourceEvaluator.java | 152 +++++ ...GeoPointDocValuesAndConstantEvaluator.java | 128 ++++ ...inGeoPointDocValuesAndSourceEvaluator.java | 151 +++++ ...alWithinGeoSourceAndConstantEvaluator.java | 132 ++++ ...tialWithinGeoSourceAndSourceEvaluator.java | 152 +++++ .../function/EsqlFunctionRegistry.java | 4 + .../spatial/LuceneComponent2DUtils.java | 91 +++ .../scalar/spatial/SpatialContains.java | 287 ++++++++ .../spatial/SpatialEvaluatorFactory.java | 4 +- .../scalar/spatial/SpatialIntersects.java | 23 +- .../spatial/SpatialRelatesFunction.java | 24 +- .../scalar/spatial/SpatialRelatesUtils.java | 10 + .../scalar/spatial/SpatialWithin.java | 252 +++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 23 +- .../esql/optimizer/LogicalPlanOptimizer.java | 23 +- .../xpack/esql/plugin/EsqlFeatures.java | 49 +- .../querydsl/query/SpatialRelatesQuery.java | 6 +- .../function/AbstractFunctionTestCase.java | 2 +- .../scalar/spatial/SpatialContainsTests.java | 46 ++ .../spatial/SpatialIntersectsTests.java | 173 +---- 
.../SpatialRelatesFunctionTestCase.java | 207 ++++++ .../scalar/spatial/SpatialWithinTests.java | 46 ++ .../optimizer/PhysicalPlanOptimizerTests.java | 172 +++++ 65 files changed, 4627 insertions(+), 327 deletions(-) create mode 100644 docs/changelog/106503.yaml create mode 100644 docs/reference/esql/functions/description/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/description/st_within.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_intersects.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_within.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/layout/st_within.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/parameters/st_within.asciidoc create mode 100644 docs/reference/esql/functions/signature/st_contains.svg create mode 100644 docs/reference/esql/functions/signature/st_within.svg create mode 100644 docs/reference/esql/functions/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/st_within.asciidoc create mode 100644 docs/reference/esql/functions/types/st_contains.asciidoc create mode 100644 docs/reference/esql/functions/types/st_within.asciidoc create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java create mode 100644 
x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java diff --git a/docs/changelog/106503.yaml b/docs/changelog/106503.yaml new file mode 100644 index 0000000000000..1b7e78d8ffc27 --- /dev/null +++ b/docs/changelog/106503.yaml @@ -0,0 +1,5 @@ +pr: 106503 +summary: "Support ST_CONTAINS and ST_WITHIN" +area: "ES|QL" +type: enhancement +issues: [] diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index 
373b1c140a896..c040e7fe01327 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -16,7 +16,7 @@ The <> function supports these aggregate functions: * <> * <> * <> -* <> +* experimental:[] <> * <> * <> // end::agg_list[] diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc new file mode 100644 index 0000000000000..ed79fe3d9c1f3 --- /dev/null +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns whether the first geometry contains the second geometry. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8b..3a36d79cbd123 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -3,3 +3,5 @@ *Description* Returns whether the two geometries or geometry columns intersect. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc new file mode 100644 index 0000000000000..be52db3f694bf --- /dev/null +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -0,0 +1,7 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Description* + +Returns whether the first geometry is within the second geometry. + +NOTE: The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/examples/st_contains.asciidoc b/docs/reference/esql/functions/examples/st_contains.asciidoc new file mode 100644 index 0000000000000..ad60944d28562 --- /dev/null +++ b/docs/reference/esql/functions/examples/st_contains.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_contains-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_contains-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_intersects.asciidoc b/docs/reference/esql/functions/examples/st_intersects.asciidoc new file mode 100644 index 0000000000000..135fd6afee17c --- /dev/null +++ b/docs/reference/esql/functions/examples/st_intersects.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_within.asciidoc b/docs/reference/esql/functions/examples/st_within.asciidoc new file mode 100644 index 0000000000000..0943ee4ba862a --- /dev/null +++ b/docs/reference/esql/functions/examples/st_within.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_within-airport_city_boundaries] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial_shapes.csv-spec[tag=st_within-airport_city_boundaries-result] +|=== + diff --git a/docs/reference/esql/functions/layout/st_contains.asciidoc b/docs/reference/esql/functions/layout/st_contains.asciidoc new file mode 100644 index 0000000000000..d2b115b5bd727 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_contains.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_contains]] +=== `ST_CONTAINS` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] + +include::../parameters/st_contains.asciidoc[] +include::../description/st_contains.asciidoc[] +include::../types/st_contains.asciidoc[] +include::../examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_intersects.asciidoc b/docs/reference/esql/functions/layout/st_intersects.asciidoc index 1d0721b65606e..820bfd2c1fee6 100644 --- a/docs/reference/esql/functions/layout/st_intersects.asciidoc +++ b/docs/reference/esql/functions/layout/st_intersects.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] include::../parameters/st_intersects.asciidoc[] include::../description/st_intersects.asciidoc[] include::../types/st_intersects.asciidoc[] +include::../examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_within.asciidoc b/docs/reference/esql/functions/layout/st_within.asciidoc new file mode 100644 index 0000000000000..38e367abc3c31 --- /dev/null +++ b/docs/reference/esql/functions/layout/st_within.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's 
AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +[discrete] +[[esql-st_within]] +=== `ST_WITHIN` + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_within.svg[Embedded,opts=inline] + +include::../parameters/st_within.asciidoc[] +include::../description/st_within.asciidoc[] +include::../types/st_within.asciidoc[] +include::../examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc new file mode 100644 index 0000000000000..dbc9adf478948 --- /dev/null +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -0,0 +1,7 @@ +*Parameters* + +`geomA`:: +Geometry column name or variable of geometry type + +`geomB`:: +Geometry column name or variable of geometry type diff --git a/docs/reference/esql/functions/signature/st_contains.svg b/docs/reference/esql/functions/signature/st_contains.svg new file mode 100644 index 0000000000000..dde41fc527454 --- /dev/null +++ b/docs/reference/esql/functions/signature/st_contains.svg @@ -0,0 +1 @@ +ST_CONTAINS(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/st_within.svg b/docs/reference/esql/functions/signature/st_within.svg new file mode 100644 index 0000000000000..a88522cfbc5cb --- /dev/null +++ b/docs/reference/esql/functions/signature/st_within.svg @@ -0,0 +1 @@ +ST_WITHIN(geomA,geomB) \ No newline at end of file diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc 
b/docs/reference/esql/functions/spatial-functions.asciidoc index c1758f61de723..739d6b2d6f58f 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -8,11 +8,15 @@ {esql} supports these spatial functions: // tag::spatial_list[] -* <> -* <> -* <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> +* experimental:[] <> // end::spatial_list[] include::st_intersects.asciidoc[] +include::st_contains.asciidoc[] +include::st_within.asciidoc[] include::st_x.asciidoc[] include::st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_centroid.asciidoc b/docs/reference/esql/functions/st_centroid.asciidoc index cee0c85d5cb45..e91a325a5597b 100644 --- a/docs/reference/esql/functions/st_centroid.asciidoc +++ b/docs/reference/esql/functions/st_centroid.asciidoc @@ -2,6 +2,8 @@ [[esql-agg-st-centroid]] === `ST_CENTROID` +experimental::[] + Calculate the spatial centroid over a field with spatial point geometry type. [source.merge.styled,esql] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc new file mode 100644 index 0000000000000..07b1a11aa7247 --- /dev/null +++ b/docs/reference/esql/functions/st_contains.asciidoc @@ -0,0 +1,26 @@ +[discrete] +[[esql-st_contains]] +=== `ST_CONTAINS` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
+ +include::description/st_contains.asciidoc[] +This is the inverse of the `<>` function. + +include::types/st_contains.asciidoc[] +include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc index 1bf4cef0e2977..fbe313d10b0e7 100644 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ b/docs/reference/esql/functions/st_intersects.asciidoc @@ -2,6 +2,8 @@ [[esql-st_intersects]] === `ST_INTERSECTS` +experimental::[] + *Syntax* [.text-center] @@ -24,17 +26,5 @@ They intersect if they have any point in common, including their interior points (points along lines or within polygons). In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ -*Supported types* - include::types/st_intersects.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_intersects-airports-results] -|=== +include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc new file mode 100644 index 0000000000000..64adb91219c4a --- /dev/null +++ b/docs/reference/esql/functions/st_within.asciidoc @@ -0,0 +1,26 @@ +[discrete] +[[esql-st_within]] +=== `ST_WITHIN` + +experimental::[] + +*Syntax* + +[.text-center] +image::esql/functions/signature/st_within.svg[Embedded,opts=inline] + +*Parameters* + +`geomA`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. + +`geomB`:: +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. +The second parameter must also have the same coordinate system as the first. +This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
+ +include::description/st_within.asciidoc[] +This is the inverse of the `<>` function. + +include::types/st_within.asciidoc[] +include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc index 692373f054d99..eec48894b5150 100644 --- a/docs/reference/esql/functions/st_x.asciidoc +++ b/docs/reference/esql/functions/st_x.asciidoc @@ -2,6 +2,8 @@ [[esql-st_x]] === `ST_X` +experimental::[] + *Syntax* [.text-center] diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc index dba9b3d450006..8fc7281e395d2 100644 --- a/docs/reference/esql/functions/st_y.asciidoc +++ b/docs/reference/esql/functions/st_y.asciidoc @@ -2,6 +2,8 @@ [[esql-st_y]] === `ST_Y` +experimental::[] + *Syntax* [.text-center] diff --git a/docs/reference/esql/functions/types/st_contains.asciidoc b/docs/reference/esql/functions/types/st_contains.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_contains.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/docs/reference/esql/functions/types/st_within.asciidoc b/docs/reference/esql/functions/types/st_within.asciidoc new file mode 100644 index 0000000000000..36bd9cc036ade --- /dev/null +++ b/docs/reference/esql/functions/types/st_within.asciidoc @@ -0,0 +1,16 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +geomA | geomB | result +cartesian_point | cartesian_point | boolean +cartesian_point | cartesian_shape | boolean +cartesian_shape | cartesian_point | boolean +cartesian_shape | cartesian_shape | boolean +geo_point | geo_point | boolean +geo_point | geo_shape | boolean +geo_shape | geo_point | boolean +geo_shape | geo_shape | boolean +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index ec181c552bf22..b097d7f2d077a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -78,6 +78,11 @@ public class CsvTestsDataLoader { "mapping-airport_city_boundaries.json", "airport_city_boundaries.csv" ); + private static final TestsDataset CARTESIAN_MULTIPOLYGONS = new TestsDataset( + "cartesian_multipolygons", + "mapping-cartesian_multipolygons.json", + "cartesian_multipolygons.csv" + ); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), @@ -96,7 +101,8 @@ public class CsvTestsDataLoader { Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB), - Map.entry(AIRPORT_CITY_BOUNDARIES.indexName, AIRPORT_CITY_BOUNDARIES) + Map.entry(AIRPORT_CITY_BOUNDARIES.indexName, AIRPORT_CITY_BOUNDARIES), + Map.entry(CARTESIAN_MULTIPOLYGONS.indexName, CARTESIAN_MULTIPOLYGONS) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv new file mode 100644 index 0000000000000..e65cdd29a22b8 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv @@ -0,0 +1,11 @@ +id:l, name:keyword, shape:cartesian_shape +0, Four squares, "MULTIPOLYGON(((0 0\, 1 0\, 1 1\, 0 1\, 0 0))\, ((2 0\, 3 0\, 3 1\, 2 1\, 2 0))\, ((2 2\, 3 2\, 3 3\, 2 3\, 2 2))\, ((0 2\, 1 2\, 1 3\, 0 3\, 0 2)))" +1, Bottom left, "POLYGON((0 0\, 1 0\, 1 1\, 0 1\, 0 0))" +2, Bottom right, "POLYGON((2 0\, 3 0\, 3 1\, 2 1\, 2 0))" +3, Top right, "POLYGON((2 2\, 3 2\, 3 3\, 2 3\, 2 2))" +4, Top left, "POLYGON((0 2\, 1 2\, 1 3\, 0 3\, 0 2))" +5, Four squares with holes, "MULTIPOLYGON(((0 0\, 1 0\, 1 1\, 0 1\, 0 0)\, (0.4 0.4\, 0.6 0.4\, 0.6 0.6\, 0.4 0.6\, 0.4 0.4))\, ((2 0\, 3 0\, 3 1\, 2 1\, 2 0)\, (2.4 0.4\, 2.6 0.4\, 2.6 0.6\, 2.4 0.6\, 2.4 0.4))\, ((2 2\, 3 2\, 3 3\, 2 3\, 2 2)\, (2.4 2.4\, 2.6 2.4\, 2.6 2.6\, 2.4 2.6\, 2.4 2.4))\, ((0 2\, 1 2\, 1 3\, 0 3\, 0 2)\, (0.4 2.4\, 0.6 2.4\, 0.6 2.6\, 0.4 2.6\, 0.4 2.4)))" +6, Bottom left with holes, "POLYGON((0 0\, 1 0\, 1 1\, 0 1\, 0 0)\, (0.4 0.4\, 0.6 0.4\, 0.6 0.6\, 0.4 0.6\, 0.4 0.4))" +7, Bottom right with holes, "POLYGON((2 0\, 3 0\, 3 1\, 2 1\, 2 0)\, (2.4 0.4\, 2.6 0.4\, 2.6 0.6\, 2.4 0.6\, 2.4 0.4))" +8, Top right with holes, "POLYGON((2 2\, 3 2\, 3 3\, 2 3\, 2 2)\, (2.4 2.4\, 2.6 2.4\, 2.6 2.6\, 2.4 2.6\, 2.4 2.4))" +9, Top left with holes, "POLYGON((0 2\, 1 2\, 1 3\, 0 3\, 0 2)\, (0.4 2.4\, 0.6 2.4\, 0.6 2.6\, 0.4 2.6\, 0.4 2.4))" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec new file mode 100644 index 0000000000000..c721d3c4899db --- /dev/null +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -0,0 +1,201 @@ +#################################################################################################### +# The test data contains four square polygons, with and without holes, and multipolygon combinations of these +# We test this data against smaller, similar sized and larger query polygons with INTERSECTS, CONTAINS and WITHIN + +#################################################################################################### +# Test against a polygon similar in size to the Bottom Left polygon + +whereIntersectsSinglePolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + +whereContainsSinglePolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.001 0.001, 0.999 0.001, 0.999 0.999, 0.001 0.999, 0.001 0.001))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 
0, 1 1, 0 1, 0 0)) +; + +whereWithinSinglePolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + +#################################################################################################### +# Test against a polygon smaller in size to the Bottom Left polygon + +whereIntersectsSmallerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +; + +whereContainsSmallerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 
0, 1 0, 1 1, 0 1, 0 0)) +; + +whereWithinSmallerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +#################################################################################################### +# Test against a polygon similar in size to the entire test data + +whereIntersectsLargerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +whereContainsLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE 
ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +whereWithinLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +#################################################################################################### +# Test against a polygon larger than all test data + +whereIntersectsEvenLargerPolygon +required_feature: esql.st_intersects + +FROM cartesian_multipolygons +| WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 
1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) +5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; + +whereContainsEvenLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +; + +whereWithinEvenLargerPolygon +required_feature: esql.st_contains_within + +FROM cartesian_multipolygons +| WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) +| SORT id +; + +id:l | name:keyword | shape:cartesian_shape +0 | Four squares | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0)), ((2 0, 3 0, 3 1, 2 1, 2 0)), ((2 2, 3 2, 3 3, 2 3, 2 2)), ((0 2, 1 2, 1 3, 0 3, 0 2))) +1 | Bottom left | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0)) +2 | Bottom right | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0)) +3 | Top right | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2)) +4 | Top left | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2)) 
+5 | Four squares with holes | MULTIPOLYGON(((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)), ((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)), ((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)), ((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4))) +6 | Bottom left with holes | POLYGON((0 0, 1 0, 1 1, 0 1, 0 0), (0.4 0.4, 0.6 0.4, 0.6 0.6, 0.4 0.6, 0.4 0.4)) +7 | Bottom right with holes | POLYGON((2 0, 3 0, 3 1, 2 1, 2 0), (2.4 0.4, 2.6 0.4, 2.6 0.6, 2.4 0.6, 2.4 0.4)) +8 | Top right with holes | POLYGON((2 2, 3 2, 3 3, 2 3, 2 2), (2.4 2.4, 2.6 2.4, 2.6 2.6, 2.4 2.6, 2.4 2.4)) +9 | Top left with holes | POLYGON((0 2, 1 2, 1 3, 0 3, 0 2), (0.4 2.4, 0.6 2.4, 0.6 2.6, 0.4 2.6, 0.4 2.4)) +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json new file mode 100644 index 0000000000000..41fae0fbe4754 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-cartesian_multipolygons.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "long" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "shape" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 9f9aeec7e2838..392d6f036111a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -64,7 +64,9 @@ sinh |"double sinh(angle:double|integer|long|unsigned_long)" split |"keyword split(string:keyword|text, delim:keyword|text)" |[string, delim] |["keyword|text", "keyword|text"] |["", ""] |keyword | "Split a single valued string into multiple strings." 
| [false, false] | false | false sqrt |"double sqrt(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "" |double | "Returns the square root of a number." | false | false | false st_centroid |"geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" |field |"geo_point|cartesian_point" | "" |"geo_point|cartesian_point" | "The centroid of a spatial field." | false | false | true +st_contains |"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry contains the second geometry." | [false, false] | false | false st_intersects |"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the two geometries or geometry columns intersect." | [false, false] | false | false +st_within |"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |["Geometry column name or variable of geometry type", "Geometry column name or variable of geometry type"] |boolean | "Returns whether the first geometry is within the second geometry." 
| [false, false] | false | false st_x |"double st_x(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the x-coordinate from a point geometry." | false | false | false st_y |"double st_y(point:geo_point|cartesian_point)" |point |"geo_point|cartesian_point" | "" |double | "Extracts the y-coordinate from a point geometry." | false | false | false starts_with |"boolean starts_with(str:keyword|text, prefix:keyword|text)" |[str, prefix] |["keyword|text", "keyword|text"] |["", ""] |boolean | "Returns a boolean that indicates whether a keyword string starts with another string" | [false, false] | false | false @@ -168,7 +170,9 @@ double pi() "keyword split(string:keyword|text, delim:keyword|text)" "double sqrt(number:double|integer|long|unsigned_long)" "geo_point|cartesian_point st_centroid(field:geo_point|cartesian_point)" +"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" +"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" "double st_x(point:geo_point|cartesian_point)" "double st_y(point:geo_point|cartesian_point)" "boolean starts_with(str:keyword|text, prefix:keyword|text)" @@ -223,5 +227,5 @@ countFunctions#[skip:-8.13.99] meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -97 | 97 | 97 +99 | 99 | 99 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 495d0cbb8d7f0..57554c41a6dec 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -1,9 
+1,9 @@ ############################################### # Tests for GEO_POINT type -# +############################################### convertFromStringQuantize -required_feature: esql.geo_point +required_feature: esql.spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); @@ -12,7 +12,9 @@ wkt:keyword |pt:geo_point POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006536) ; -convertFromString#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] +convertFromString +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-str[] ROW wkt = "POINT(42.97109630194 14.7552534413725)" | EVAL pt = TO_GEOPOINT(wkt) @@ -25,7 +27,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-str-result[] ; -convertFromStringArray#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] +convertFromStringArray +required_feature: esql.spatial_points_from_source + row wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geopoint(wkt); @@ -33,7 +37,9 @@ wkt:keyword ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] |[POINT(42.97109630194 14.7552534413725), POINT(75.8092915005895 22.727749187571)] ; -centroidFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromStringNested +required_feature: esql.st_centroid + row wkt = "POINT(42.97109629958868 14.7552534006536)" | STATS c = ST_CENTROID(TO_GEOPOINT(wkt)); @@ -41,7 +47,9 @@ c:geo_point POINT(42.97109629958868 14.7552534006536) ; -centroidFromString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromString1 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -51,7 +59,9 @@ c:geo_point POINT(42.97109629958868 14.7552534006536) ; -centroidFromString2#[skip:-8.12.99, reason:st_centroid added in 8.13] 
+centroidFromString2 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -61,7 +71,9 @@ c:geo_point POINT(59.390193899162114 18.741501288022846) ; -centroidFromString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromString3 +required_feature: esql.st_centroid + ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -99,7 +111,9 @@ POINT(42.97109629958868 14.7552534006536) | 42.97109629958868 | 14.755253400653 // end::st_x_y-result[] ; -simpleLoad#[skip:-8.12.99, reason:spatial type geo_point improved precision in 8.13] +simpleLoad +required_feature: esql.spatial_points_from_source + FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k @@ -131,7 +145,12 @@ c:long | x:double | y:double 19 | null | null ; -centroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +############################################### +# Tests for ST_CENTROID on GEO_POINT type + +centroidFromAirports +required_feature: esql.st_centroid + // tag::st_centroid-airports[] FROM airports | STATS centroid=ST_CENTROID(location) @@ -144,7 +163,9 @@ POINT(-0.030548143003023033 24.37553649504829) // end::st_centroid-airports-result[] ; -centroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsNested +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(TO_GEOPOINT(location)) ; @@ -153,7 +174,9 @@ centroid:geo_point POINT (-0.03054810272375508 24.37553651570554) ; -centroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCount 
+required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() ; @@ -162,7 +185,9 @@ centroid:geo_point | count:long POINT(-0.030548143003023033 24.37553649504829) | 891 ; -centroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGrouped +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | SORT scalerank DESC @@ -179,7 +204,9 @@ POINT(-26.976065734634176 42.907839377294295) | 24 | 3 POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; -centroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -189,7 +216,9 @@ centroid:geo_point | count:long POINT(83.27726172452623 28.99289782286029) | 33 ; -centroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCentroid +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -199,7 +228,9 @@ centroid:geo_point | count:long POINT (7.572387259169772 26.836561792945492) | 891 ; -centroidFromAirportsCountCityLocations#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountCityLocations +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() ; @@ -208,7 +239,9 @@ centroid:geo_point | count:long POINT (1.3965610809060276 24.127649406297987) | 891 ; -centroidFromAirportsCountGroupedCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCountry +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country | SORT count DESC, country ASC @@ 
-235,7 +268,9 @@ POINT (6.725663595240224 9.201645437966693) | 11 | Nigeria POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; -centroidFromAirportsFilteredCountry#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFilteredCountry +required_feature: esql.st_centroid + FROM airports | WHERE country == "United States" | STATS centroid=ST_CENTROID(city_location), count=COUNT() @@ -245,7 +280,9 @@ centroid:geo_point | count:long POINT (-97.3333946136801 38.07953176370194) | 129 ; -centroidFromAirportsCountGroupedCountryCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountGroupedCountryCentroid +required_feature: esql.st_centroid + FROM airports | STATS centroid=ST_CENTROID(city_location), count=COUNT() BY country | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -255,7 +292,9 @@ centroid:geo_point | count:long POINT (17.55538044598613 18.185558743854063) | 891 ; -centroidFromAirportsCountryCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsCountryCount +required_feature: esql.st_centroid + FROM airports | STATS airports=ST_CENTROID(location), cities=ST_CENTROID(city_location), count=COUNT() ; @@ -264,7 +303,9 @@ airports:geo_point | cities:geo_point POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.127649406297987) | 891 ; -centroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsFilteredAndSorted +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | SORT abbrev @@ -276,7 +317,9 @@ centroid:geo_point | count:long POINT(78.73736493755132 26.761841227998957) | 12 ; -centroidFromAirportsAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterMvExpand +required_feature: esql.st_centroid + FROM airports | MV_EXPAND type | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -286,7 +329,9 @@ centroid:geo_point | count:long 
POINT(2.121611400672094 24.559172889205755) | 933 ; -centroidFromAirportsGroupedAfterMvExpand#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsGroupedAfterMvExpand +required_feature: esql.st_centroid + FROM airports | MV_EXPAND type | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank @@ -304,7 +349,9 @@ POINT(-26.976065734634176 42.907839377294295) | 24 | 3 POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; -centroidFromAirportsGroupedAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsGroupedAfterMvExpandFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | MV_EXPAND type @@ -315,7 +362,9 @@ centroid:geo_point | count:long | scalerank:i POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; -centroidFromAirportsAfterMvExpandFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterMvExpandFiltered +required_feature: esql.st_centroid + FROM airports | WHERE scalerank == 9 | MV_EXPAND type @@ -326,7 +375,9 @@ centroid:geo_point | count:long POINT(83.16847535921261 28.79002037679311) | 40 ; -centroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromAirportsAfterKeywordPredicateCountryUK +required_feature: esql.st_centroid + FROM airports | WHERE country == "United Kingdom" | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -336,7 +387,9 @@ centroid:geo_point | count:long POINT (-2.597342072712148 54.33551226578214) | 17 ; -centroidFromAirportsAfterIntersectsPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsPredicateCountryUK +required_feature: esql.st_intersects + FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 
2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -346,34 +399,70 @@ centroid:geo_point | count:long POINT (-2.597342072712148 54.33551226578214) | 17 ; -intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterContainsPredicateCountryUK +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +centroidFromAirportsAfterWithinPredicateCountryUK +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:geo_point | count:long +POINT (-2.597342072712148 54.33551226578214) | 17 +; + +intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK +required_feature: esql.st_intersects + FROM airports | WHERE country == "United Kingdom" | STATS centroid = ST_CENTROID(location), count=COUNT() | EVAL centroid_in_uk = ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL centroid_in_iceland = 
ST_INTERSECTS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| KEEP centroid, count, centroid_in_uk, centroid_in_iceland +| EVAL centroid_within_uk = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_within_iceland = ST_WITHIN(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| EVAL centroid_contains_uk = ST_CONTAINS(centroid, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL centroid_contains_iceland = ST_CONTAINS(centroid, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| KEEP centroid, count, centroid_in_uk, centroid_in_iceland, centroid_within_uk, centroid_within_iceland, centroid_contains_uk, centroid_contains_iceland ; -centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean -POINT (-2.597342072712148 54.33551226578214) | 17 | true | false +centroid:geo_point | count:long | centroid_in_uk:boolean | centroid_in_iceland:boolean | centroid_within_uk:boolean | centroid_within_iceland:boolean | centroid_contains_uk:boolean | centroid_contains_iceland:boolean +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false | true | false | false | false ; 
-centroidFromAirportsAfterIntersectsEvalExpression#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsEvalExpression +required_feature: esql.st_intersects + FROM airports | EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) | EVAL in_iceland = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) -| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland +| EVAL within_uk = ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) +| EVAL within_iceland = ST_WITHIN(location, TO_GEOSHAPE("POLYGON ((-25.4883 65.5312, -23.4668 66.7746, -18.4131 67.4749, -13.0957 66.2669, -12.3926 64.4159, -20.1270 62.7346, -24.7852 63.3718, -25.4883 65.5312))")) +| STATS centroid = ST_CENTROID(location), count=COUNT() BY in_uk, in_iceland, within_uk, within_iceland | SORT count ASC ; -centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean -POINT (-21.946634463965893 64.13187285885215) | 1 | false | true -POINT (-2.597342072712148 54.33551226578214) | 17 | true | false -POINT (0.04453958108176276 23.74658354606057) | 873 | false | false +centroid:geo_point | count:long | in_uk:boolean | in_iceland:boolean | within_uk:boolean | within_iceland:boolean +POINT (-21.946634463965893 64.13187285885215) | 1 | false | true | false | true +POINT (-2.597342072712148 54.33551226578214) | 17 | true | false | true | false +POINT 
(0.04453958108176276 23.74658354606057) | 873 | false | false | false | false ; -centroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsPredicate +required_feature: esql.st_intersects + FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -383,7 +472,9 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; -centroidFromAirportsAfterIntersectsCompoundPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +centroidFromAirportsAfterIntersectsCompoundPredicate +required_feature: esql.st_intersects + FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -393,29 +484,38 @@ centroid:geo_point | count:long POINT (42.97109629958868 14.7552534006536) | 1 ; -pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS on GEO_POINT type + +pointIntersectsLiteralPolygon +required_feature: esql.st_intersects + +// tag::st_intersects-airports[] FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +// end::st_intersects-airports[] ; +// tag::st_intersects-airports-result[] abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_intersects-airports-result[] ; -pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] -// tag::st_intersects-airports[] +pointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + 
FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) -// end::st_intersects-airports[] ; -// tag::st_intersects-airports-results[] abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid -// end::st_intersects-airports-results[] ; -literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -427,7 +527,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -439,7 +541,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonOneRow +required_feature: esql.st_intersects + ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -447,7 +551,9 @@ intersects:boolean true ; -cityInCityBoundary#[skip:-8.13.99, reason:st_intersects added in 8.14] +cityInCityBoundary +required_feature: esql.st_intersects + FROM airport_city_boundaries | EVAL in_city = ST_INTERSECTS(city_location, city_boundary) | STATS count=COUNT(*) BY in_city @@ -461,7 +567,9 @@ cardinality:k | in_city:boolean "many" | true ; -cityNotInCityBoundaryBiggest#[skip:-8.13.99, reason:st_intersects added in 8.14] +cityNotInCityBoundaryBiggest 
+required_feature: esql.st_intersects + FROM airport_city_boundaries | WHERE NOT ST_INTERSECTS(city_location, city_boundary) | EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) @@ -474,7 +582,9 @@ abbrev:keyword | airport:text | city:keyword | city_location:geo_poi SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.2533) | 598 | POLYGON((109.1802 18.4609, 109.2304 18.4483, 109.2311 18.4261, 109.2696 18.411, 109.2602 18.3581, 109.2273 18.348, 109.2286 18.2638, 109.2842 18.2665, 109.3518 18.2166, 109.4508 18.1936, 109.4895 18.2281, 109.5137 18.2283, 109.4914 18.2781, 109.5041 18.2948, 109.4809 18.3034, 109.5029 18.3422, 109.5249 18.3375, 109.4993 18.3632, 109.535 18.4007, 109.5104 18.4374, 109.5231 18.4474, 109.5321 18.53, 109.4992 18.5568, 109.4192 18.5646, 109.4029 18.6302, 109.3286 18.5772, 109.309 18.5191, 109.2913 18.5141, 109.2434 18.5607, 109.2022 18.5572, 109.1815 18.5163, 109.1908 18.4711, 109.1802 18.4609))) ; -airportCityLocationPointIntersection#[skip:-8.13.99, reason:st_intersects added in 8.14] +airportCityLocationPointIntersection +required_feature: esql.st_intersects + FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) ; @@ -483,7 +593,9 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid ; -airportCityLocationPointIntersectionCentroid#[skip:-8.13.99, reason:st_intersects added in 8.14] +airportCityLocationPointIntersectionCentroid +required_feature: esql.st_intersects + FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) | STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() @@ -493,7 +605,192 @@ location:geo_point | city_location:geo_point | count:long POINT (0 0) | POINT (0 0) | 1 ; -geoPointEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] +############################################### +# Tests for ST_CONTAINS on GEO_POINT type + 
+literalPolygonContainsLiteralPoint +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPointDoesNotContainLiteralPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_CONTAINS(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +; + +literalPolygonContainsLiteralPointOneRow +required_feature: esql.st_contains_within + +ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +contains:boolean +true +; + +literalPointDoesNotContainLiteralPolygonOneRow +required_feature: esql.st_contains_within + +ROW contains = ST_CONTAINS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +contains:boolean +false +; + +pointContainsLiteralPolygon +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +; + +pointContainedInLiteralPolygon +required_feature: esql.st_contains_within + +FROM airports +| WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +; + +airportCityLocationPointContains +required_feature: 
esql.st_contains_within + +FROM airports_mp +| WHERE ST_CONTAINS(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointContainsCentroid +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_CONTAINS(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + +############################################### +# Tests for ST_WITHIN on GEO_POINT type + +literalPolygonNotWithinLiteralPoint +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:geo_point +; + +literalPointWithinLiteralPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_GEOPOINT(wkt) +| WHERE ST_WITHIN(pt, TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:geo_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +literalPolygonNotWithinLiteralPointOneRow +required_feature: esql.st_contains_within + +ROW within = ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) +; + +within:boolean +false +; + +literalPointWithinLiteralPolygonOneRow +required_feature: esql.st_contains_within + +ROW within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +within:boolean +true +; + +pointWithinLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_within-airports[] +FROM 
airports +| WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) +// end::st_within-airports[] +; + +// tag::st_within-airports-results[] +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen | POINT(42.97109630194 14.7552534413725) | Hodeidah Int'l | 9 | mid +// end::st_within-airports-results[] +; + +airportCityLocationPointWithin +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_WITHIN(location, city_location) +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | scalerank:i | type:k +XXX | Atlantis | POINT(0 0) | Atlantis | POINT(0 0) | Atlantis Int'l | 1 | mid +; + +airportCityLocationPointWithinCentroid +required_feature: esql.st_contains_within + +FROM airports_mp +| WHERE ST_WITHIN(location, city_location) +| STATS location=ST_CENTROID(location), city_location=ST_CENTROID(city_location), count=COUNT() +; + +location:geo_point | city_location:geo_point | count:long +POINT (0 0) | POINT (0 0) | 1 +; + +############################################### +# Tests for Equality and casting with GEO_POINT + +geoPointEquals +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -508,7 +805,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-equals-result[] ; -geoPointNotEquals#[skip:-8.12.99, reason:spatial type geo_point improved in 8.13] +geoPointNotEquals +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-not-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -523,7 +822,9 @@ wkt:keyword |pt:geo_point // end::to_geopoint-not-equals-result[] ; -convertFromStringParseError#[skip:-8.12.99, 
reason:spatial type geo_point improved in 8.13] +convertFromStringParseError +required_feature: esql.spatial_points_from_source + // tag::to_geopoint-str-parse-error[] row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt @@ -547,9 +848,11 @@ wkt:keyword |pt:geo_point ############################################### # Tests for CARTESIAN_POINT type -# +############################################### + +convertCartesianFromString +required_feature: esql.spatial_points_from_source -convertCartesianFromString#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] // tag::to_cartesianpoint-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -564,7 +867,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-str-result[] ; -convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] +convertCartesianFromStringArray +required_feature: esql.spatial_points_from_source + row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianpoint(wkt); @@ -572,7 +877,9 @@ wkt:keyword |pt:cartesian_point ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] |[POINT(4297.11 -1475.53), POINT(7580.93 2272.77)] ; -centroidCartesianFromStringNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidCartesianFromStringNested +required_feature: esql.st_centroid + row wkt = "POINT(4297.10986328125 -1475.530029296875)" | STATS c = ST_CENTROID(TO_CARTESIANPOINT(wkt)); @@ -580,7 +887,9 @@ c:cartesian_point POINT(4297.10986328125 -1475.530029296875) ; -centroidFromCartesianString1#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString1 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -590,7 +899,9 @@ c:cartesian_point POINT(4297.10986328125 
-1475.530029296875) ; -centroidFromCartesianString2#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString2 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -600,7 +911,9 @@ c:cartesian_point POINT(5939.02001953125 398.6199951171875) ; -centroidFromCartesianString3#[skip:-8.12.99, reason:st_centroid added in 8.13] +centroidFromCartesianString3 +required_feature: esql.st_centroid + ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -621,7 +934,9 @@ point:cartesian_point | x:double | y:double POINT(4297.10986328125 -1475.530029296875) | 4297.10986328125 | -1475.530029296875 ; -simpleCartesianLoad#[skip:-8.12.99, reason:spatial type cartesian_point improved precision in 8.13] +simpleCartesianLoad +required_feature: esql.spatial_points_from_source + FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k @@ -635,7 +950,12 @@ WIIT | POINT (11708145.489503577 -584415.9142832769) | Radin Inten II ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l | 9 | mid ; -cartesianCentroidFromAirports#[skip:-8.12.99, reason:st_centroid added in 8.13] +############################################### +# Tests for ST_CENTROID on CARTESIAN_POINT type + +cartesianCentroidFromAirports +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location); @@ -643,7 +963,9 @@ centroid:cartesian_point POINT(-266681.67563861894 3053301.5120195406) ; -cartesianCentroidFromAirportsNested#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsNested +required_feature: esql.st_centroid + FROM airports_web | STATS 
centroid=ST_CENTROID(TO_CARTESIANPOINT(location)); @@ -651,7 +973,9 @@ centroid:cartesian_point POINT (-266681.66530554957 3053301.506061676) ; -cartesianCentroidFromAirportsCount#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCount +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location), count=COUNT() ; @@ -660,7 +984,9 @@ centroid:cartesian_point | count:long POINT(-266681.67563861894 3053301.5120195406) | 849 ; -cartesianCentroidFromAirportsCountGrouped#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCountGrouped +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | SORT scalerank DESC @@ -677,7 +1003,9 @@ POINT(-3002961.9270833335 5451641.91796875) | 24 | 3 POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; -cartesianCentroidFromAirportsFiltered#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsFiltered +required_feature: esql.st_centroid + FROM airports_web | WHERE scalerank == 9 | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -687,7 +1015,9 @@ centroid:cartesian_point | count:long POINT(9289013.153846154 3615537.0533353365) | 26 ; -cartesianCentroidFromAirportsFilteredAndSorted#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsFilteredAndSorted +required_feature: esql.st_centroid + FROM airports_web | WHERE scalerank == 9 | SORT abbrev @@ -699,7 +1029,9 @@ centroid:cartesian_point | count:long POINT(9003597.4375 3429344.0078125) | 8 ; -cartesianCentroidFromAirportsCountGroupedCentroid#[skip:-8.12.99, reason:st_centroid added in 8.13] +cartesianCentroidFromAirportsCountGroupedCentroid +required_feature: esql.st_centroid + FROM airports_web | STATS centroid=ST_CENTROID(location), count=COUNT() BY scalerank | STATS centroid=ST_CENTROID(centroid), count=SUM(count) @@ -709,7 +1041,12 @@ centroid:cartesian_point | 
count:long POINT (726480.0130685265 3359566.331716279) | 849 ; -cartesianCentroidFromAirportsAfterIntersectsPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterIntersectsPredicate +required_feature: esql.st_intersects + FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) | STATS centroid=ST_CENTROID(location), count=COUNT() @@ -719,7 +1056,9 @@ centroid:cartesian_point | count:long POINT (4783520.5 1661010.0) | 1 ; -cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +cartesianPointIntersectsPolygon +required_feature: esql.st_intersects + FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) ; @@ -728,7 +1067,9 @@ abbrev:keyword | location:cartesian_point | name:text | HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid ; -literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPointIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANPOINT(wkt) @@ -740,7 +1081,126 @@ wkt:keyword | pt:cartesian_point "POINT(1 -1)" | POINT(1 -1) ; -cartesianPointEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] +############################################### +# Tests for ST_CONTAINS on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 
1600000))"), location) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPolygonContainsPointPredicate +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), pt) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (NaN NaN) | 0 +; + +cartesianPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +; + +literalCartesianPointContainsPolygonPredicate +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_CONTAINS(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 
1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +; + +############################################### +# Tests for ST_WITHIN on CARTESIAN_POINT type + +cartesianCentroidFromAirportsAfterWithinPredicate +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +| STATS centroid=ST_CENTROID(location), count=COUNT() +; + +centroid:cartesian_point | count:long +POINT (4783520.5 1661010.0) | 1 +; + +cartesianPointWithinPolygon +required_feature: esql.st_contains_within + +FROM airports_web +| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) +; + +abbrev:keyword | location:cartesian_point | name:text | scalerank:i | type:k +HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | 9 | mid +; + +literalCartesianPointWithinPolygon +required_feature: esql.st_contains_within + +ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] +| MV_EXPAND wkt +| EVAL pt = TO_CARTESIANPOINT(wkt) +| WHERE ST_WITHIN(pt, TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) +; + +wkt:keyword | pt:cartesian_point +"POINT(1 1)" | POINT(1 1) +"POINT(1 -1)" | POINT(1 -1) +; + +############################################### +# Tests for Equality and casting with CARTESIAN_POINT + +cartesianPointEquals +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -755,7 +1215,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-equals-result[] ; -cartesianPointNotEquals#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] +cartesianPointNotEquals +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-not-equals[] ROW wkt = ["POINT(4297.11 -1475.53)",
"POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -770,7 +1232,9 @@ wkt:keyword |pt:cartesian_point // end::to_cartesianpoint-not-equals-result[] ; -convertCartesianFromStringParseError#[skip:-8.12.99, reason:spatial type cartesian_point improved in 8.13] +convertCartesianFromStringParseError +required_feature: esql.spatial_points_from_source + // tag::to_cartesianpoint-str-parse-error[] row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 69e56c7efe55d..f010ed13370e0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -2,7 +2,9 @@ # Tests for GEO_SHAPE type # -convertFromString#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromString +required_feature: esql.spatial_shapes + // tag::to_geoshape-str[] ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" | EVAL geom = TO_GEOSHAPE(wkt) @@ -15,7 +17,9 @@ wkt:keyword | geom:geo_shape // end::to_geoshape-str-result[] ; -convertFromStringArray#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringArray +required_feature: esql.spatial_shapes + row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geoshape(wkt); @@ -23,7 +27,9 @@ wkt:keyword ["POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))", "POINT(75.8092915005895 22.727749187571)"] |[POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10)), POINT(75.8092915005895 22.727749187571)] ; -convertFromStringViaPoint#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringViaPoint +required_feature: esql.spatial_shapes + ROW wkt = "POINT (30 10)" | EVAL point = TO_GEOPOINT(wkt) | EVAL shape = 
TO_GEOSHAPE(point) @@ -34,14 +40,18 @@ wkt:keyword | point:geo_point | shape:geo_shape ; # need to work out how to upload WKT -simpleLoad#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoad +required_feature: esql.spatial_shapes + FROM countries_bbox | WHERE id == "ISL"; id:keyword| name:keyword| shape:geo_shape ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) ; -simpleLoadPointsAsShapes#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +simpleLoadPointsAsShapes +required_feature: esql.spatial_shapes + FROM airports | WHERE abbrev == "CPH" OR abbrev == "VLC" | SORT abbrev @@ -66,7 +76,12 @@ abbrev:keyword | region:text | city_location:geo_point | airport:tex CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen | 265 ; -pointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS with GEO_SHAPE + +pointIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM airports | EVAL location = TO_GEOSHAPE(location) | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -77,7 +92,9 @@ abbrev:keyword | name:text | location:geo_shape | count HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) ; -polygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +polygonIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM airport_city_boundaries | WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) | KEEP abbrev, airport, region, city, city_location @@ -88,7 +105,9 @@ abbrev:keyword | airport:text | region:text | city:keyword | city_locati SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) ; -pointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] 
+pointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + FROM airports | EVAL location = TO_GEOSHAPE(location) | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -99,7 +118,9 @@ abbrev:keyword | name:text | location:geo_shape | count HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen | Al Ḩudaydah | POINT(42.9511 14.8022) ; -literalPointIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -111,7 +132,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) @@ -123,7 +146,9 @@ wkt:keyword | pt:geo_point "POINT(1 -1)" | POINT(1 -1) ; -literalPointAsShapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointAsShapeIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOSHAPE(wkt) @@ -135,7 +160,9 @@ wkt:keyword | pt:geo_shape "POINT(1 -1)" | POINT(1 -1) ; -literalPointAsShapeIntersectsLiteralPolygonReversed#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPointAsShapeIntersectsLiteralPolygonReversed +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_GEOSHAPE(wkt) @@ -147,7 +174,9 @@ wkt:keyword | pt:geo_shape "POINT(1 -1)" | POINT(1 -1) ; -shapeIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] 
+shapeIntersectsLiteralPolygon +required_feature: esql.st_intersects + FROM countries_bbox | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) | SORT id DESC @@ -159,7 +188,9 @@ SWZ | Swaziland | BBOX(30.798336, 32.133400, -25.728336, -27.316391) LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) ; -literalPolygonIntersectsLiteralPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPolygonIntersectsLiteralPolygon +required_feature: esql.st_intersects + ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] | EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") | MV_EXPAND wkt @@ -172,7 +203,9 @@ wkt:keyword | shape:geo_shape "POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))" | POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60)) | POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64)) ; -literalPolygonIntersectsLiteralPolygonOneRow#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalPolygonIntersectsLiteralPolygonOneRow +required_feature: esql.st_intersects + ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) ; @@ -180,7 +213,49 @@ intersects:boolean true ; -geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +############################################### +# Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE + +polygonContainsLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_contains-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_CONTAINS(city_boundary, TO_GEOSHAPE("POLYGON((109.35 18.3, 109.45 18.3, 109.45 18.4, 109.35 18.4, 109.35 18.3))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_contains-airport_city_boundaries[] +| LIMIT 1 +; + +// tag::st_contains-airport_city_boundaries-result[] +abbrev:keyword | 
airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +// end::st_contains-airport_city_boundaries-result[] +; + +polygonWithinLiteralPolygon +required_feature: esql.st_contains_within + +// tag::st_within-airport_city_boundaries[] +FROM airport_city_boundaries +| WHERE ST_WITHIN(city_boundary, TO_GEOSHAPE("POLYGON((109.1 18.15, 109.6 18.15, 109.6 18.65, 109.1 18.65, 109.1 18.15))")) +| KEEP abbrev, airport, region, city, city_location +// end::st_within-airport_city_boundaries[] +| LIMIT 1 +; + +// tag::st_within-airport_city_boundaries-result[] +abbrev:keyword | airport:text | region:text | city:keyword | city_location:geo_point +SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(109.5036 18.2533) +// end::st_within-airport_city_boundaries-result[] +; + +############################################### +# Tests for Equality and casting with GEO_SHAPE + +geo_shapeEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -191,7 +266,9 @@ wkt:keyword |pt:geo_shape "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" |POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) ; -geo_shapeNotEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +geo_shapeNotEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt | EVAL pt = to_geoshape(wkt) @@ -202,7 +279,9 @@ wkt:keyword |pt:geo_shape "POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) ; -convertFromStringParseError#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +convertFromStringParseError +required_feature: esql.spatial_shapes + row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | 
mv_expand wkt | eval pt = to_geoshape(wkt) @@ -222,7 +301,9 @@ wkt:keyword |pt:geo_shape # Tests for CARTESIAN_SHAPE type # -convertCartesianShapeFromString#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianShapeFromString +required_feature: esql.spatial_shapes + // tag::to_cartesianshape-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))"] | MV_EXPAND wkt @@ -237,7 +318,9 @@ wkt:keyword |geom:cartesian_shape // end::to_cartesianshape-str-result[] ; -convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_shape only added in 8.13] +convertCartesianFromStringArray +required_feature: esql.spatial_shapes + row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianshape(wkt); @@ -245,7 +328,9 @@ wkt:keyword ["POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] |[POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97)), POINT(7580.93 2272.77)] ; -convertCartesianFromStringViaPoint#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianFromStringViaPoint +required_feature: esql.spatial_shapes + ROW wkt = "POINT (3010 -1010)" | EVAL point = TO_CARTESIANPOINT(wkt) | EVAL shape = TO_CARTESIANSHAPE(point) @@ -256,14 +341,18 @@ wkt:keyword | point:cartesian_point | shape:cartesian_shape ; # need to work out how to upload WKT -simpleCartesianShapeLoad#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +simpleCartesianShapeLoad +required_feature: esql.spatial_shapes + FROM countries_bbox_web | WHERE id == "ISL"; id:keyword| 
name:keyword|shape:cartesian_shape ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9196525.03584683) ; -simpleLoadCartesianPointsAsShapes#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +simpleLoadCartesianPointsAsShapes +required_feature: esql.spatial_shapes + FROM airports_web | WHERE abbrev == "CPH" OR abbrev == "VLC" | SORT abbrev @@ -275,7 +364,12 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart "VLC" | "Valencia" | 8 | "mid" | POINT(-52706.98819688343 4792315.469321795) ; -cartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +############################################### +# Tests for ST_INTERSECTS with CARTESIAN_SHAPE + +cartesianPointIntersectsPolygon +required_feature: esql.st_intersects + FROM airports_web | EVAL location = TO_CARTESIANSHAPE(location) | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -286,7 +380,9 @@ abbrev:keyword | name:text | location:cartesian_shape | HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | 9 | mid ; -literalCartesianPointIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPointIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt | EVAL pt = TO_CARTESIANSHAPE(wkt) @@ -298,7 +394,9 @@ wkt:keyword | pt:cartesian_shape "POINT(1 -1)" | POINT(1 -1) ; -cartesianShapeIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +cartesianShapeIntersectsPolygon +required_feature: esql.st_intersects + FROM countries_bbox_web | WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) | SORT id DESC @@ -310,7 +408,9 @@ SWZ | Swaziland | BBOX(3428455.080322901, 
3577073.7249586442, -2965472 LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117.2692412077, -3587446.106149188) ; -literalCartesianPolygonIntersectsPolygon#[skip:-8.13.99, reason:st_intersects added in 8.14] +literalCartesianPolygonIntersectsPolygon +required_feature: esql.st_intersects + ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] | MV_EXPAND wkt | EVAL shape = TO_CARTESIANSHAPE(wkt) @@ -322,7 +422,41 @@ wkt:keyword | shape:ca "POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))" | POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000)) | POLYGON((-1500 6400, -1000 6400, -1000 6600, -1500 6600, -1500 6400)) ; -cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +############################################### +# Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE + +cartesianShapeContainsPolygon +required_feature: esql.st_contains_within + +FROM countries_bbox_web +| WHERE ST_CONTAINS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +; + +cartesianShapeWithinPolygon +required_feature: esql.st_contains_within + +FROM countries_bbox_web +| WHERE ST_WITHIN(shape, TO_CARTESIANSHAPE("POLYGON((1800000 -2500000, 4300000 -2500000, 4300000 -6000000, 1800000 -6000000, 1800000 -2500000))")) +| SORT id DESC +; + +id:keyword | name:keyword | shape:cartesian_shape +ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908.4975596936, -5937134.146607068) +SWZ | Swaziland | BBOX(3428455.080322901, 3577073.7249586442, -2965472.9128583763, -3163056.5390926218) +LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, 
-3321117.2692412077, -3587446.106149188) +; + +############################################### +# Tests for Equality and casting with CARTESIAN_SHAPE + +cartesianshapeEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = to_cartesianshape(wkt) @@ -333,7 +467,9 @@ wkt:keyword |pt:cartesian_shape "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) ; -cartesianShapeNotEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +cartesianShapeNotEquals +required_feature: esql.spatial_shapes + ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt | EVAL pt = to_cartesianshape(wkt) @@ -344,7 +480,9 @@ wkt:keyword |pt:cartesian_shape "POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) ; -convertCartesianShapeFromStringParseError#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +convertCartesianShapeFromStringParseError +required_feature: esql.spatial_shapes + row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt | eval pt = to_cartesianshape(wkt) diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..ce7e2889fc298 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..5b536707e8a0f --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if 
(rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialContains.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + 
return new SpatialContainsCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..55dfbede4c003 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock 
leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..d2456597b5761 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = 
rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for 
(int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..957800fb3c38e --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..348c343f0b005 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialContainsGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + 
result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + 
this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..186eacc680c2c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { 
+ try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialContainsGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..6bff91629f74c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialContains}. + * This class is generated. Do not edit it. + */ +public final class SpatialContainsGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialContainsGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = 
rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialContains.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p 
= 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialContains.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialContainsGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialContainsGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialContainsGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..6deb7133fcf13 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialWithinCartesianPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinCartesianPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..d2470583c3a7c --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,142 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialWithinCartesianPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector).asBlock(); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if 
(rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } + return result.build(); + } + } + + public BooleanVector eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanVector.Builder result = driverContext.blockFactory().newBooleanVectorBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + result.appendBoolean(SpatialWithin.processCartesianPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianPointDocValuesAndSourceEvaluator get(DriverContext context) { + return 
new SpatialWithinCartesianPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..45c8f60d12b03 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinCartesianSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock 
leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, 
EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinCartesianSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..958ac825eeb0b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinCartesianSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinCartesianSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinCartesianSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = 
rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for 
(int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processCartesianSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinCartesianSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinCartesianSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinCartesianSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java new file mode 100644 index 0000000000000..680cf7b38445b --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndConstantEvaluator.java @@ -0,0 +1,128 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialWithinGeoPointDocValuesAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoPointDocValuesAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector) { + try(BooleanBlock.Builder result = 
driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndConstant(leftValueVector.getLong(p), rightValue)); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoPointDocValuesAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinGeoPointDocValuesAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java new file mode 100644 index 0000000000000..624b9243a62c4 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoPointDocValuesAndSourceEvaluator.java @@ -0,0 +1,151 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. 
+ */ +public final class SpatialWithinGeoPointDocValuesAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoPointDocValuesAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (LongBlock leftValueBlock = (LongBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + LongVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, LongBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + 
result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndSource(leftValueBlock.getLong(leftValueBlock.getFirstValueIndex(p)), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, LongVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoPointDocValuesAndSource(leftValueVector.getLong(p), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = 
leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoPointDocValuesAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinGeoPointDocValuesAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinGeoPointDocValuesAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java new file mode 100644 index 0000000000000..3647594337c57 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndConstantEvaluator.java @@ -0,0 +1,132 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinGeoSourceAndConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final Component2D rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoSourceAndConstantEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, Component2D rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock); + } + return eval(page.getPositionCount(), leftValueVector); + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock) { + 
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndConstant(leftValueVector.getBytesRef(p, leftValueScratch), rightValue)); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final Component2D rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory 
leftValue, + Component2D rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoSourceAndConstantEvaluator get(DriverContext context) { + return new SpatialWithinGeoSourceAndConstantEvaluator(source, leftValue.get(context), rightValue, context); + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndConstantEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java new file mode 100644 index 0000000000000..8794c3d0488b3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinGeoSourceAndSourceEvaluator.java @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import java.io.IOException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link SpatialWithin}. + * This class is generated. Do not edit it. + */ +public final class SpatialWithinGeoSourceAndSourceEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator leftValue; + + private final EvalOperator.ExpressionEvaluator rightValue; + + private final DriverContext driverContext; + + public SpatialWithinGeoSourceAndSourceEvaluator(Source source, + EvalOperator.ExpressionEvaluator leftValue, EvalOperator.ExpressionEvaluator rightValue, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.leftValue = leftValue; + this.rightValue = rightValue; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (BytesRefBlock leftValueBlock = (BytesRefBlock) leftValue.eval(page)) { + try (BytesRefBlock rightValueBlock = (BytesRefBlock) rightValue.eval(page)) { + BytesRefVector leftValueVector = leftValueBlock.asVector(); + if (leftValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + BytesRefVector rightValueVector = 
rightValueBlock.asVector(); + if (rightValueVector == null) { + return eval(page.getPositionCount(), leftValueBlock, rightValueBlock); + } + return eval(page.getPositionCount(), leftValueVector, rightValueVector); + } + } + } + + public BooleanBlock eval(int positionCount, BytesRefBlock leftValueBlock, + BytesRefBlock rightValueBlock) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 0; p < positionCount; p++) { + if (leftValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (leftValueBlock.getValueCount(p) != 1) { + if (leftValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (rightValueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (rightValueBlock.getValueCount(p) != 1) { + if (rightValueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndSource(leftValueBlock.getBytesRef(leftValueBlock.getFirstValueIndex(p), leftValueScratch), rightValueBlock.getBytesRef(rightValueBlock.getFirstValueIndex(p), rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public BooleanBlock eval(int positionCount, BytesRefVector leftValueVector, + BytesRefVector rightValueVector) { + try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { + BytesRef leftValueScratch = new BytesRef(); + BytesRef rightValueScratch = new BytesRef(); + position: for (int p = 
0; p < positionCount; p++) { + try { + result.appendBoolean(SpatialWithin.processGeoSourceAndSource(leftValueVector.getBytesRef(p, leftValueScratch), rightValueVector.getBytesRef(p, rightValueScratch))); + } catch (IllegalArgumentException | IOException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(leftValue, rightValue); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory leftValue; + + private final EvalOperator.ExpressionEvaluator.Factory rightValue; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory leftValue, + EvalOperator.ExpressionEvaluator.Factory rightValue) { + this.source = source; + this.leftValue = leftValue; + this.rightValue = rightValue; + } + + @Override + public SpatialWithinGeoSourceAndSourceEvaluator get(DriverContext context) { + return new SpatialWithinGeoSourceAndSourceEvaluator(source, leftValue.get(context), rightValue.get(context), context); + } + + @Override + public String toString() { + return "SpatialWithinGeoSourceAndSourceEvaluator[" + "leftValue=" + leftValue + ", rightValue=" + rightValue + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 6f1f01bbe632c..9f0976e0045d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java 
@@ -79,7 +79,9 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -182,7 +184,9 @@ private FunctionDefinition[][] functions() { // spatial new FunctionDefinition[] { def(SpatialCentroid.class, SpatialCentroid::new, "st_centroid"), + def(SpatialContains.class, SpatialContains::new, "st_contains"), def(SpatialIntersects.class, SpatialIntersects::new, "st_intersects"), + def(SpatialWithin.class, SpatialWithin::new, "st_within"), def(StX.class, StX::new, "st_x"), def(StY.class, StY::new, "st_y") }, // conditional diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java new file mode 100644 index 0000000000000..e7b3292b3714d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/LuceneComponent2DUtils.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.geo.Rectangle; +import org.apache.lucene.geo.XYGeometry; + +import java.util.ArrayList; +import java.util.List; + +/** + * This utilities class provides access to protected methods in Lucene using alternative APIs. + * For example, the 'create' method returns the original Component2D array, instead of a Component2D containing + * a component tree of potentially multiple components. This is particularly useful for algorithms that need to + * operate on each component individually. + */ +public class LuceneComponent2DUtils { + /** + * This method is based on LatLonGeometry.create, but returns an array of Component2D objects for multi-component geometries. + */ + public static Component2D[] createLatLonComponents(LatLonGeometry... latLonGeometries) { + if (latLonGeometries == null) { + throw new IllegalArgumentException("geometries must not be null"); + } else if (latLonGeometries.length == 0) { + throw new IllegalArgumentException("geometries must not be empty"); + } else { + final List components = new ArrayList<>(latLonGeometries.length); + + for (int i = 0; i < latLonGeometries.length; ++i) { + if (latLonGeometries[i] == null) { + throw new IllegalArgumentException("geometries[" + i + "] must not be null"); + } + + if (latLonGeometries[i] instanceof Rectangle rectangle && rectangle.crossesDateline()) { + addRectangle(components, rectangle); + } else { + components.add(LatLonGeometry.create(latLonGeometries[i])); + } + } + + return components.toArray(new Component2D[0]); + } + } + + private static void addRectangle(List components, Rectangle rectangle) { + double minLongitude = rectangle.minLon; + boolean crossesDateline = rectangle.minLon > rectangle.maxLon; + if (minLongitude == 180.0 && crossesDateline) { + minLongitude = -180.0; + crossesDateline = false; + } + if 
(crossesDateline) { + Rectangle left = new Rectangle(rectangle.minLat, rectangle.maxLat, -180.0, rectangle.maxLon); + Rectangle right = new Rectangle(rectangle.minLat, rectangle.maxLat, minLongitude, 180.0); + components.add(LatLonGeometry.create(left)); + components.add(LatLonGeometry.create(right)); + } else { + components.add(LatLonGeometry.create(rectangle)); + } + } + + /** + * This method is based on XYGeometry.create, but returns an array of Component2D objects for multi-component geometries. + */ + public static Component2D[] createXYComponents(XYGeometry... xyGeometries) { + if (xyGeometries == null) { + throw new IllegalArgumentException("geometries must not be null"); + } else if (xyGeometries.length == 0) { + throw new IllegalArgumentException("geometries must not be empty"); + } else { + Component2D[] components = new Component2D[xyGeometries.length]; + + for (int i = 0; i < xyGeometries.length; ++i) { + if (xyGeometries[i] == null) { + throw new IllegalArgumentException("geometries[" + i + "] must not be null"); + } + + components[i] = XYGeometry.create(xyGeometries[i]); + } + + return components; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java new file mode 100644 index 0000000000000..8bf33a7e3dc61 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -0,0 +1,287 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2Ds; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.makeGeometryFromLiteral; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_CONTAINS. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_CONTAINS and QueryRelation.CONTAINS. + */ +public class SpatialContains extends SpatialRelatesFunction { + // public for test access with reflection + public static final SpatialRelationsContains GEO = new SpatialRelationsContains( + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Contains") + ); + // public for test access with reflection + public static final SpatialRelationsContains CARTESIAN = new SpatialRelationsContains( + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Contains") + ); + + /** + * We override the normal behaviour for CONTAINS because we need to test each component separately. + * This applies to multi-component geometries (MultiPolygon, etc.) as well as polygons that cross the dateline. 
+ */ + static final class SpatialRelationsContains extends SpatialRelations { + SpatialRelationsContains(SpatialCoordinateTypes spatialCoordinateType, CoordinateEncoder encoder, ShapeIndexer shapeIndexer) { + super(ShapeField.QueryRelation.CONTAINS, spatialCoordinateType, encoder, shapeIndexer); + } + + @Override + protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws IOException { + Component2D[] rightComponent2Ds = asLuceneComponent2Ds(crsType, fromBytesRef(right)); + return geometryRelatesGeometries(left, rightComponent2Ds); + } + + private boolean geometryRelatesGeometries(BytesRef left, Component2D[] rightComponent2Ds) throws IOException { + Geometry leftGeom = fromBytesRef(left); + GeometryDocValueReader leftDocValueReader = asGeometryDocValueReader(coordinateEncoder, shapeIndexer, leftGeom); + return geometryRelatesGeometries(leftDocValueReader, rightComponent2Ds); + } + + private boolean geometryRelatesGeometries(GeometryDocValueReader leftDocValueReader, Component2D[] rightComponent2Ds) + throws IOException { + for (Component2D rightComponent2D : rightComponent2Ds) { + // Every component of the right geometry must be contained within the left geometry for this to pass + if (geometryRelatesGeometry(leftDocValueReader, rightComponent2D) == false) { + return false; + } + } + return true; + } + } + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the first geometry contains the second geometry.", + note = "The second parameter must also have the same coordinate system as the first. 
" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", + examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") + ) + public SpatialContains( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + SpatialContains(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.CONTAINS; + } + + @Override + public SpatialContains withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialContains(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialContains replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialContains(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialContains::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Geometry rightGeom = makeGeometryFromLiteral(right()); + Component2D[] components = asLuceneComponent2Ds(crsType, rightGeom); + return (crsType == SpatialCrsType.GEO) + ? 
GEO.geometryRelatesGeometries(docValueReader, components) + : CARTESIAN.geometryRelatesGeometries(docValueReader, components); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + /** + * To keep the number of evaluators to a minimum, we swap the arguments to get the WITHIN relation. + * This also makes other optimizations, like lucene-pushdown, simpler to develop. + */ + @Override + public SpatialRelatesFunction surrogate() { + if (left().foldable() && right().foldable() == false) { + return new SpatialWithin(source(), right(), left(), rightDocValues, leftDocValues); + } + return this; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialContainsGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialContainsGeoSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialContainsGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new 
SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialContainsGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialContainsCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialContainsCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialContainsCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialContainsCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef 
leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java index ccdd68e1806c1..cea7d926c3e39 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialEvaluatorFactory.java @@ -38,7 +38,7 @@ public abstract EvalOperator.ExpressionEvaluator.Factory get( Function toEvaluator ); - public static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator( + static EvalOperator.ExpressionEvaluator.Factory makeSpatialEvaluator( SpatialSourceSupplier s, Map> evaluatorRules, Function toEvaluator @@ -170,7 +170,7 @@ public EvalOperator.ExpressionEvaluator.Factory get( protected record SpatialEvaluatorFieldKey(DataType dataType, boolean isConstant) {} - protected record SpatialEvaluatorKey( + record SpatialEvaluatorKey( SpatialRelatesFunction.SpatialCrsType crsType, boolean leftDocValues, boolean rightDocValues, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 831c041caaa94..93965b0d3e9be 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -18,6 +18,7 @@ import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import 
org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -40,21 +41,35 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +/** + * This is the primary class for supporting the function ST_INTERSECTS. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_INTERSECTS and QueryRelation.INTERSECTS. + */ public class SpatialIntersects extends SpatialRelatesFunction { - protected static final SpatialRelations GEO = new SpatialRelations( + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( ShapeField.QueryRelation.INTERSECTS, SpatialCoordinateTypes.GEO, CoordinateEncoder.GEO, new GeoShapeIndexer(Orientation.CCW, "ST_Intersects") ); - protected static final SpatialRelations CARTESIAN = new SpatialRelations( + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( ShapeField.QueryRelation.INTERSECTS, SpatialCoordinateTypes.CARTESIAN, CoordinateEncoder.CARTESIAN, new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo(returnType = { "boolean" }, description = "Returns whether the two geometries or geometry columns intersect.") + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the two geometries or geometry columns intersect.", + note = "The second parameter must also have the same coordinate system as the first. 
" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", + examples = @Example(file = "spatial", tag = "st_intersects-airports") + ) public SpatialIntersects( Source source, @Param( @@ -112,7 +127,7 @@ public Object fold() { } @Override - protected Map> evaluatorRules() { + Map> evaluatorRules() { return evaluatorMap; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index cdd21682d0db7..09938558b6cce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -35,6 +35,7 @@ import java.util.function.Function; import java.util.function.Predicate; +import static org.apache.lucene.document.ShapeField.QueryRelation.CONTAINS; import static org.apache.lucene.document.ShapeField.QueryRelation.DISJOINT; import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpatial; import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; @@ -203,7 +204,14 @@ public boolean rightDocValues() { /** * Produce a map of rules defining combinations of incoming types to the evaluator factory that should be used. */ - protected abstract Map> evaluatorRules(); + abstract Map> evaluatorRules(); + + /** + * Some spatial functions can replace themselves with alternatives that are more efficient for certain cases. 
+ */ + public SpatialRelatesFunction surrogate() { + return this; + } @Override public EvalOperator.ExpressionEvaluator.Factory toEvaluator( @@ -262,7 +270,7 @@ protected boolean geometryRelatesGeometry(BytesRef left, BytesRef right) throws return geometryRelatesGeometry(left, rightComponent2D); } - private Geometry fromBytesRef(BytesRef bytesRef) { + protected Geometry fromBytesRef(BytesRef bytesRef) { return SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(bytesRef); } @@ -286,12 +294,16 @@ protected boolean pointRelatesGeometry(long encoded, Geometry geometry) { protected boolean pointRelatesGeometry(long encoded, Component2D component2D) { // This code path exists for doc-values points, and we could consider re-using the point class to reduce garbage creation Point point = spatialCoordinateType.longAsPoint(encoded); - return geometryRelatesPoint(component2D, point); + return pointRelatesGeometry(point, component2D); } - private boolean geometryRelatesPoint(Component2D component2D, Point point) { - boolean contains = component2D.contains(point.getX(), point.getY()); - return queryRelation == DISJOINT ? contains == false : contains; + private boolean pointRelatesGeometry(Point point, Component2D component2D) { + if (queryRelation == CONTAINS) { + return component2D.withinPoint(point.getX(), point.getY()) == Component2D.WithinRelation.CANDIDATE; + } else { + boolean contains = component2D.contains(point.getX(), point.getY()); + return queryRelation == DISJOINT ? 
contains == false : contains; + } } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java index e088dbf7a70ec..d558e1c21c045 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesUtils.java @@ -50,6 +50,16 @@ static Component2D asLuceneComponent2D(SpatialRelatesFunction.SpatialCrsType crs } } + static Component2D[] asLuceneComponent2Ds(SpatialRelatesFunction.SpatialCrsType crsType, Geometry geometry) { + if (crsType == SpatialRelatesFunction.SpatialCrsType.GEO) { + var luceneGeometries = LuceneGeometriesUtils.toLatLonGeometry(geometry, true, t -> {}); + return LuceneComponent2DUtils.createLatLonComponents(luceneGeometries); + } else { + var luceneGeometries = LuceneGeometriesUtils.toXYGeometry(geometry, t -> {}); + return LuceneComponent2DUtils.createXYComponents(luceneGeometries); + } + } + /** * This function is used to convert a spatial constant to a doc-values byte array. * When both left and right sides are constants, we convert the left to a doc-values byte array and the right to a Component2D. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java new file mode 100644 index 0000000000000..a5ade4cfeb73c --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -0,0 +1,252 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import org.apache.lucene.document.ShapeField; +import org.apache.lucene.geo.Component2D; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.expression.SurrogateExpression; +import org.elasticsearch.xpack.esql.expression.function.Example; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asGeometryDocValueReader; +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils.asLuceneComponent2D; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; 
+import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; + +/** + * This is the primary class for supporting the function ST_WITHIN. + * The bulk of the capabilities are within the parent class SpatialRelatesFunction, + * which supports all the relations in the ShapeField.QueryRelation enum. + * Here we simply wire the rules together specific to ST_WITHIN and QueryRelation.WITHIN. + */ +public class SpatialWithin extends SpatialRelatesFunction implements SurrogateExpression { + // public for test access with reflection + public static final SpatialRelations GEO = new SpatialRelations( + ShapeField.QueryRelation.WITHIN, + SpatialCoordinateTypes.GEO, + CoordinateEncoder.GEO, + new GeoShapeIndexer(Orientation.CCW, "ST_Within") + ); + // public for test access with reflection + public static final SpatialRelations CARTESIAN = new SpatialRelations( + ShapeField.QueryRelation.WITHIN, + SpatialCoordinateTypes.CARTESIAN, + CoordinateEncoder.CARTESIAN, + new CartesianShapeIndexer("ST_Within") + ); + + @FunctionInfo( + returnType = { "boolean" }, + description = "Returns whether the first geometry is within the second geometry.", + note = "The second parameter must also have the same coordinate system as the first. 
" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters.", + examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") + ) + public SpatialWithin( + Source source, + @Param( + name = "geomA", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression left, + @Param( + name = "geomB", + type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, + description = "Geometry column name or variable of geometry type" + ) Expression right + ) { + this(source, left, right, false, false); + } + + SpatialWithin(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { + super(source, left, right, leftDocValues, rightDocValues); + } + + @Override + public ShapeField.QueryRelation queryRelation() { + return ShapeField.QueryRelation.WITHIN; + } + + @Override + public SpatialWithin withDocValues(Set attributes) { + // Only update the docValues flags if the field is found in the attributes + boolean leftDV = leftDocValues || foundField(left(), attributes); + boolean rightDV = rightDocValues || foundField(right(), attributes); + return new SpatialWithin(source(), left(), right(), leftDV, rightDV); + } + + @Override + protected SpatialWithin replaceChildren(Expression newLeft, Expression newRight) { + return new SpatialWithin(source(), newLeft, newRight, leftDocValues, rightDocValues); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, SpatialWithin::new, left(), right()); + } + + @Override + public Object fold() { + try { + GeometryDocValueReader docValueReader = asGeometryDocValueReader(crsType, left()); + Component2D component2D = asLuceneComponent2D(crsType, right()); + return (crsType == SpatialCrsType.GEO) + ? 
GEO.geometryRelatesGeometry(docValueReader, component2D) + : CARTESIAN.geometryRelatesGeometry(docValueReader, component2D); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to fold constant fields: " + e.getMessage(), e); + } + } + + @Override + Map> evaluatorRules() { + return evaluatorMap; + } + + /** + * To keep the number of evaluators to a minimum, we swap the arguments to get the CONTAINS relation. + * This also makes other optimizations, like lucene-pushdown, simpler to develop. + */ + @Override + public SpatialRelatesFunction surrogate() { + if (left().foldable() && right().foldable() == false) { + return new SpatialContains(source(), right(), left(), rightDocValues, leftDocValues); + } + return this; + } + + private static final Map> evaluatorMap = new HashMap<>(); + + static { + // Support geo_point and geo_shape from source and constant combinations + for (DataType spatialType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + for (DataType otherType : new DataType[] { GEO_POINT, GEO_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields(SpatialWithinGeoSourceAndSourceEvaluator.Factory::new) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory(SpatialWithinGeoSourceAndConstantEvaluator.Factory::new) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinGeoPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new 
SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinGeoPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + + // Support cartesian_point and cartesian_shape from source and constant combinations + for (DataType spatialType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + for (DataType otherType : new DataType[] { CARTESIAN_POINT, CARTESIAN_SHAPE }) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinCartesianSourceAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinCartesianSourceAndConstantEvaluator.Factory::new + ) + ); + if (EsqlDataTypes.isSpatialPoint(spatialType)) { + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSources(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorFactoryWithFields( + SpatialWithinCartesianPointDocValuesAndSourceEvaluator.Factory::new + ) + ); + evaluatorMap.put( + SpatialEvaluatorFactory.SpatialEvaluatorKey.fromSourceAndConstant(spatialType, otherType).withLeftDocValues(), + new SpatialEvaluatorFactory.SpatialEvaluatorWithConstantFactory( + SpatialWithinCartesianPointDocValuesAndConstantEvaluator.Factory::new + ) + ); + } + } + } + } + + @Evaluator(extraName = "GeoSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processGeoSourceAndSource(BytesRef 
leftValue, BytesRef rightValue) throws IOException { + return GEO.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return GEO.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "GeoPointDocValuesAndSource", warnExceptions = { IllegalArgumentException.class }) + static boolean processGeoPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return GEO.pointRelatesGeometry(leftValue, geometry); + } + + @Evaluator(extraName = "CartesianSourceAndConstant", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndConstant(BytesRef leftValue, @Fixed Component2D rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianSourceAndSource", warnExceptions = { IllegalArgumentException.class, IOException.class }) + static boolean processCartesianSourceAndSource(BytesRef leftValue, BytesRef rightValue) throws IOException { + return CARTESIAN.geometryRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndConstant", warnExceptions = { IllegalArgumentException.class }) + static boolean processCartesianPointDocValuesAndConstant(long leftValue, @Fixed Component2D rightValue) { + return CARTESIAN.pointRelatesGeometry(leftValue, rightValue); + } + + @Evaluator(extraName = "CartesianPointDocValuesAndSource") + static boolean processCartesianPointDocValuesAndSource(long leftValue, BytesRef rightValue) { + Geometry geometry = SpatialCoordinateTypes.UNSPECIFIED.wkbToGeometry(rightValue); + return CARTESIAN.pointRelatesGeometry(leftValue, geometry); + } +} diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 536265b1be3e8..21c17110ad4fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -101,7 +101,10 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvZip; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -391,7 +394,9 @@ public static List namedTypeEntries() { of(ScalarFunction.class, Pow.class, PlanNamedTypes::writePow, PlanNamedTypes::readPow), of(ScalarFunction.class, StartsWith.class, PlanNamedTypes::writeStartsWith, PlanNamedTypes::readStartsWith), of(ScalarFunction.class, EndsWith.class, PlanNamedTypes::writeEndsWith, PlanNamedTypes::readEndsWith), - of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeIntersects, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialIntersects.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readIntersects), + of(ScalarFunction.class, SpatialContains.class, PlanNamedTypes::writeSpatialRelatesFunction, 
PlanNamedTypes::readContains), + of(ScalarFunction.class, SpatialWithin.class, PlanNamedTypes::writeSpatialRelatesFunction, PlanNamedTypes::readWithin), of(ScalarFunction.class, Substring.class, PlanNamedTypes::writeSubstring, PlanNamedTypes::readSubstring), of(ScalarFunction.class, Left.class, PlanNamedTypes::writeLeft, PlanNamedTypes::readLeft), of(ScalarFunction.class, Right.class, PlanNamedTypes::writeRight, PlanNamedTypes::readRight), @@ -1494,11 +1499,17 @@ static SpatialIntersects readIntersects(PlanStreamInput in) throws IOException { return new SpatialIntersects(Source.EMPTY, in.readExpression(), in.readExpression()); } - static void writeIntersects(PlanStreamOutput out, SpatialIntersects intersects) throws IOException { - List fields = intersects.children(); - assert fields.size() == 2; - out.writeExpression(fields.get(0)); - out.writeExpression(fields.get(1)); + static SpatialContains readContains(PlanStreamInput in) throws IOException { + return new SpatialContains(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static SpatialWithin readWithin(PlanStreamInput in) throws IOException { + return new SpatialWithin(Source.EMPTY, in.readExpression(), in.readExpression()); + } + + static void writeSpatialRelatesFunction(PlanStreamOutput out, SpatialRelatesFunction spatialRelatesFunction) throws IOException { + out.writeExpression(spatialRelatesFunction.left()); + out.writeExpression(spatialRelatesFunction.right()); } static Now readNow(PlanStreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 3425306863585..ec3ff07a9867f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -19,6 +19,7 
@@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -120,7 +121,8 @@ protected static Batch substitutions() { new SubstituteSurrogates(), new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject(), - new SkipQueryOnEmptyMappings() + new SkipQueryOnEmptyMappings(), + new SubstituteSpatialSurrogates() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); } @@ -297,6 +299,25 @@ static String limitToString(String string) { } } + /** + * Currently this works similarly to SurrogateExpression, leaving the logic inside the expressions, + * so each can decide for itself whether or not to change to a surrogate expression. + * But what is actually being done is similar to LiteralsOnTheRight. We can consider in the future moving + * this in either direction, reducing the number of rules, but for now, + * it's a separate rule to reduce the risk of unintended interactions with other rules. 
+ */ + static class SubstituteSpatialSurrogates extends OptimizerRules.OptimizerExpressionRule { + + SubstituteSpatialSurrogates() { + super(TransformDirection.UP); + } + + @Override + protected SpatialRelatesFunction rule(SpatialRelatesFunction function) { + return function.surrogate(); + } + } + static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRule { ConvertStringToByteRef() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 95e275a2d0333..29f0e04ef2b94 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -39,9 +39,14 @@ public class EsqlFeatures implements FeatureSpecification { private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn"); /** - * Support for loading {@code geo_point} fields. Added in #102177. + * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177. */ - private static final NodeFeature GEO_POINT_SUPPORT = new NodeFeature("esql.geo_point"); + private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points"); + + /** + * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. + */ + private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source"); /** * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}. @@ -55,10 +60,25 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double"); - // /** - // * Support for loading {@code geo_point} fields. 
- // */ - // private static final NodeFeature GEO_SHAPE_SUPPORT = new NodeFeature("esql.geo_shape"); + /** + * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. + */ + private static final NodeFeature SPATIAL_SHAPES = new NodeFeature("esql.spatial_shapes"); + + /** + * Support for spatial aggregation {@code ST_CENTROID}. Done in #104269. + */ + private static final NodeFeature ST_CENTROID = new NodeFeature("esql.st_centroid"); + + /** + * Support for spatial aggregation {@code ST_INTERSECTS}. Done in #104907. + */ + private static final NodeFeature ST_INTERSECTS = new NodeFeature("esql.st_intersects"); + + /** + * Support for spatial aggregation {@code ST_CONTAINS} and {@code ST_WITHIN}. Done in #106503. + */ + private static final NodeFeature ST_CONTAINS_WITHIN = new NodeFeature("esql.st_contains_within"); /** * The introduction of the {@code VALUES} agg. @@ -77,7 +97,19 @@ public class EsqlFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of(ASYNC_QUERY, AGG_VALUES, MV_SORT, DISABLE_NULLABLE_OPTS, ST_X_Y, FROM_OPTIONS); + return Set.of( + ASYNC_QUERY, + AGG_VALUES, + MV_SORT, + DISABLE_NULLABLE_OPTS, + ST_X_Y, + FROM_OPTIONS, + SPATIAL_POINTS_FROM_SOURCE, + SPATIAL_SHAPES, + ST_CENTROID, + ST_INTERSECTS, + ST_CONTAINS_WITHIN + ); } @Override @@ -85,10 +117,9 @@ public Map getHistoricalFeatures() { return Map.ofEntries( Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(GEO_POINT_SUPPORT, Version.V_8_12_0), + Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), Map.entry(POW_DOUBLE, Version.V_8_12_0) - // Map.entry(GEO_SHAPE_SUPPORT, Version.V_8_13_0) ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java 
index ca69569546ba3..e67ea0cf5624f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SpatialRelatesQuery.java @@ -231,9 +231,9 @@ private static org.apache.lucene.search.Query pointShapeQuery( SearchExecutionContext context ) { final boolean hasDocValues = context.getFieldType(fieldName).hasDocValues(); - // only the intersects relation is supported for indexed cartesian point types - if (relation != ShapeField.QueryRelation.INTERSECTS) { - throw new QueryShardException(context, relation + " query relation not supported for Field [" + fieldName + "]."); + if (geometry.type() != ShapeType.POINT && relation == ShapeField.QueryRelation.CONTAINS) { + // A point field can never contain a non-point geometry + return new MatchNoDocsQuery(); } final Consumer checker = t -> { if (t == ShapeType.POINT || t == ShapeType.MULTIPOINT || t == ShapeType.LINESTRING || t == ShapeType.MULTILINESTRING) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 56869de1b87ca..b97622f28520c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -1223,7 +1223,7 @@ private static void renderFullLayout(String name, boolean hasExamples) throws IO writeToTempDir("layout", rendered, "asciidoc"); } - private static String functionName() { + protected static String functionName() { Class testClass = getTestClass(); if (testClass.isAnnotationPresent(FunctionName.class)) { FunctionName functionNameAnnotation = testClass.getAnnotation(FunctionName.class); diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java new file mode 100644 index 0000000000000..37bfb6eccac5d --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContainsTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_contains") +public class SpatialContainsTests extends SpatialRelatesFunctionTestCase { + public SpatialContainsTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + 
SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialContainsTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialContains(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java index e36d92fecd81f..83679ca7134e4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersectsTests.java @@ -7,38 +7,22 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; -import joptsimple.internal.Strings; - import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; -import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; -import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.Locale; -import java.util.Set; 
import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; - @FunctionName("st_intersects") - -public class SpatialIntersectsTests extends AbstractFunctionTestCase { +public class SpatialIntersectsTests extends SpatialRelatesFunctionTestCase { public SpatialIntersectsTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -47,9 +31,9 @@ public SpatialIntersectsTests(@Name("TestCase") Supplier parameters() { List suppliers = new ArrayList<>(); DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; - addSpatialCombinations(suppliers, geoDataTypes); + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, geoDataTypes); DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; - addSpatialCombinations(suppliers, cartesianDataTypes); + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); return parameterSuppliersFromTypedData( errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialIntersectsTests::typeErrorMessage) ); @@ -59,155 +43,4 @@ public static Iterable parameters() { protected Expression build(Source source, List args) { return new SpatialIntersects(source, args.get(0), args.get(1)); } - - private static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { - for (DataType leftType : dataTypes) { - TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); - for (DataType rightType : dataTypes) { - if (typeCompatible(leftType, rightType)) { - TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); - suppliers.add( - 
TestCaseSupplier.testCaseSupplier( - leftDataSupplier, - rightDataSupplier, - SpatialIntersectsTests::spatialEvaluatorString, - DataTypes.BOOLEAN, - (l, r) -> expected(l, leftType, r, rightType) - ) - ); - } - } - } - } - - /** - * Build the expected error message for an invalid type signature. - */ - protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { - List badArgPositions = new ArrayList<>(); - for (int i = 0; i < types.size(); i++) { - if (validPerPosition.get(i).contains(types.get(i)) == false) { - badArgPositions.add(i); - } - } - if (badArgPositions.size() == 0) { - return oneInvalid(1, 0, includeOrdinal, types); - } else if (badArgPositions.size() == 1) { - int badArgPosition = badArgPositions.get(0); - int goodArgPosition = badArgPosition == 0 ? 1 : 0; - if (isSpatial(types.get(goodArgPosition)) == false) { - return oneInvalid(badArgPosition, -1, includeOrdinal, types); - } else { - return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); - } - } else { - return oneInvalid(0, -1, includeOrdinal, types); - } - } - - private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { - String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; - String expectedType = goodArgPosition >= 0 - ? 
compatibleTypes(types.get(goodArgPosition)) - : "geo_point, cartesian_point, geo_shape or cartesian_shape"; - String name = types.get(badArgPosition).typeName(); - return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; - } - - private static String compatibleTypes(DataType spatialDataType) { - return Strings.join(compatibleTypeNames(spatialDataType), " or "); - } - - private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { - return switch (dataType.esType()) { - case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); - case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); - case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); - case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); - default -> throw new IllegalArgumentException("Unsupported datatype for ST_INTERSECTS: " + dataType); - }; - } - - private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { - if (typeCompatible(leftType, rightType) == false) { - return null; - } - // TODO cast objects to right type and check intersection - BytesRef leftWKB = asGeometryWKB(left, leftType); - BytesRef rightWKB = asGeometryWKB(right, rightType); - SpatialRelatesFunction.SpatialRelations spatialIntersects = spatialRelations(left, leftType, right, rightType); - try { - return spatialIntersects.geometryRelatesGeometry(leftWKB, rightWKB); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - private static SpatialRelatesFunction.SpatialRelations spatialRelations( - Object left, - DataType leftType, - Object right, - DataType rightType - ) { - if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { - return SpatialIntersects.GEO; - } else if (isSpatial(leftType) || isSpatial(rightType)) { - return SpatialIntersects.CARTESIAN; - } else { - throw new IllegalArgumentException( - 
"Unsupported left and right types: left[" - + leftType.esType() - + ":" - + left.getClass().getSimpleName() - + "] right[" - + rightType.esType() - + ":" - + right.getClass().getSimpleName() - + "]" - ); - } - } - - private static BytesRef asGeometryWKB(Object object, DataType dataType) { - if (isString(dataType)) { - return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); - } else if (object instanceof BytesRef wkb) { - return wkb; - } else { - throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); - } - } - - private static boolean typeCompatible(DataType leftType, DataType rightType) { - if (isSpatial(leftType) && isSpatial(rightType)) { - // Both must be GEO_* or both must be CARTESIAN_* - return countGeo(leftType, rightType) != 1; - } - return true; - } - - private static DataType pickSpatialType(DataType leftType, DataType rightType) { - if (isSpatial(leftType)) { - return leftType; - } else if (isSpatial(rightType)) { - return rightType; - } else { - throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); - } - } - - private static String spatialEvaluatorString(DataType leftType, DataType rightType) { - String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; - return "SpatialIntersects" + crsType + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; - } - - private static int countGeo(DataType... 
types) { - int count = 0; - for (DataType type : types) { - if (isSpatialGeo(type)) { - count++; - } - } - return count; - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java new file mode 100644 index 0000000000000..e905f85141f31 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunctionTestCase.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import joptsimple.internal.Strings; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.ql.expression.TypeResolutions; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction.compatibleTypeNames; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatial; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isSpatialGeo; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isString; + +public abstract class 
SpatialRelatesFunctionTestCase extends AbstractFunctionTestCase { + + private static String getFunctionClassName() { + Class testClass = getTestClass(); + String testClassName = testClass.getSimpleName(); + return testClassName.replace("Tests", ""); + } + + private static Class getSpatialRelatesFunctionClass() throws ClassNotFoundException { + String functionClassName = getFunctionClassName(); + return Class.forName("org.elasticsearch.xpack.esql.expression.function.scalar.spatial." + functionClassName); + } + + private static SpatialRelatesFunction.SpatialRelations getRelationsField(String name) { + try { + Field field = getSpatialRelatesFunctionClass().getField(name); + Object value = field.get(null); + return (SpatialRelatesFunction.SpatialRelations) value; + } catch (NoSuchFieldException | ClassNotFoundException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + protected static void addSpatialCombinations(List suppliers, DataType[] dataTypes) { + for (DataType leftType : dataTypes) { + TestCaseSupplier.TypedDataSupplier leftDataSupplier = testCaseSupplier(leftType); + for (DataType rightType : dataTypes) { + if (typeCompatible(leftType, rightType)) { + TestCaseSupplier.TypedDataSupplier rightDataSupplier = testCaseSupplier(rightType); + suppliers.add( + TestCaseSupplier.testCaseSupplier( + leftDataSupplier, + rightDataSupplier, + SpatialRelatesFunctionTestCase::spatialEvaluatorString, + DataTypes.BOOLEAN, + (l, r) -> expected(l, leftType, r, rightType) + ) + ); + } + } + } + } + + /** + * Build the expected error message for an invalid type signature. 
+ */ + protected static String typeErrorMessage(boolean includeOrdinal, List> validPerPosition, List types) { + List badArgPositions = new ArrayList<>(); + for (int i = 0; i < types.size(); i++) { + if (validPerPosition.get(i).contains(types.get(i)) == false) { + badArgPositions.add(i); + } + } + if (badArgPositions.isEmpty()) { + return oneInvalid(1, 0, includeOrdinal, types); + } else if (badArgPositions.size() == 1) { + int badArgPosition = badArgPositions.get(0); + int goodArgPosition = badArgPosition == 0 ? 1 : 0; + if (isSpatial(types.get(goodArgPosition)) == false) { + return oneInvalid(badArgPosition, -1, includeOrdinal, types); + } else { + return oneInvalid(badArgPosition, goodArgPosition, includeOrdinal, types); + } + } else { + return oneInvalid(0, -1, includeOrdinal, types); + } + } + + private static String oneInvalid(int badArgPosition, int goodArgPosition, boolean includeOrdinal, List types) { + String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; + String expectedType = goodArgPosition >= 0 + ? 
compatibleTypes(types.get(goodArgPosition)) + : "geo_point, cartesian_point, geo_shape or cartesian_shape"; + String name = types.get(badArgPosition).typeName(); + return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; + } + + private static String compatibleTypes(DataType spatialDataType) { + return Strings.join(compatibleTypeNames(spatialDataType), " or "); + } + + private static TestCaseSupplier.TypedDataSupplier testCaseSupplier(DataType dataType) { + return switch (dataType.esType()) { + case "geo_point" -> TestCaseSupplier.geoPointCases(() -> false).get(0); + case "geo_shape" -> TestCaseSupplier.geoShapeCases(() -> false).get(0); + case "cartesian_point" -> TestCaseSupplier.cartesianPointCases(() -> false).get(0); + case "cartesian_shape" -> TestCaseSupplier.cartesianShapeCases(() -> false).get(0); + default -> throw new IllegalArgumentException("Unsupported datatype for " + functionName() + ": " + dataType); + }; + } + + private static Object expected(Object left, DataType leftType, Object right, DataType rightType) { + if (typeCompatible(leftType, rightType) == false) { + return null; + } + // TODO cast objects to right type and check intersection + BytesRef leftWKB = asGeometryWKB(left, leftType); + BytesRef rightWKB = asGeometryWKB(right, rightType); + SpatialRelatesFunction.SpatialRelations spatialRelations = spatialRelations(left, leftType, right, rightType); + try { + return spatialRelations.geometryRelatesGeometry(leftWKB, rightWKB); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static SpatialRelatesFunction.SpatialRelations spatialRelations( + Object left, + DataType leftType, + Object right, + DataType rightType + ) { + if (isSpatialGeo(leftType) || isSpatialGeo(rightType)) { + return getRelationsField("GEO"); + } else if (isSpatial(leftType) || isSpatial(rightType)) { + return getRelationsField("CARTESIAN"); + } else { + throw new IllegalArgumentException( 
+ "Unsupported left and right types: left[" + + leftType.esType() + + ":" + + left.getClass().getSimpleName() + + "] right[" + + rightType.esType() + + ":" + + right.getClass().getSimpleName() + + "]" + ); + } + } + + private static BytesRef asGeometryWKB(Object object, DataType dataType) { + if (isString(dataType)) { + return SpatialCoordinateTypes.UNSPECIFIED.wktToWkb(object.toString()); + } else if (object instanceof BytesRef wkb) { + return wkb; + } else { + throw new IllegalArgumentException("Invalid geometry base type for " + dataType + ": " + object.getClass().getSimpleName()); + } + } + + private static boolean typeCompatible(DataType leftType, DataType rightType) { + if (isSpatial(leftType) && isSpatial(rightType)) { + // Both must be GEO_* or both must be CARTESIAN_* + return countGeo(leftType, rightType) != 1; + } + return true; + } + + private static DataType pickSpatialType(DataType leftType, DataType rightType) { + if (isSpatial(leftType)) { + return leftType; + } else if (isSpatial(rightType)) { + return rightType; + } else { + throw new IllegalArgumentException("Invalid spatial types: " + leftType + " and " + rightType); + } + } + + private static String spatialEvaluatorString(DataType leftType, DataType rightType) { + String crsType = isSpatialGeo(pickSpatialType(leftType, rightType)) ? "Geo" : "Cartesian"; + return getFunctionClassName() + + crsType + + "SourceAndSourceEvaluator[leftValue=Attribute[channel=0], rightValue=Attribute[channel=1]]"; + } + + private static int countGeo(DataType... 
types) { + int count = 0; + for (DataType type : types) { + if (isSpatialGeo(type)) { + count++; + } + } + return count; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java new file mode 100644 index 0000000000000..11dbc060b4eb5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithinTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +@FunctionName("st_within") +public class SpatialWithinTests extends SpatialRelatesFunctionTestCase { + public SpatialWithinTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + List suppliers = new ArrayList<>(); + DataType[] geoDataTypes = { EsqlDataTypes.GEO_POINT, EsqlDataTypes.GEO_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, 
geoDataTypes); + DataType[] cartesianDataTypes = { EsqlDataTypes.CARTESIAN_POINT, EsqlDataTypes.CARTESIAN_SHAPE }; + SpatialRelatesFunctionTestCase.addSpatialCombinations(suppliers, cartesianDataTypes); + return parameterSuppliersFromTypedData( + errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers), SpatialWithinTests::typeErrorMessage) + ); + } + + @Override + protected Expression build(Source source, List args) { + return new SpatialWithin(source, args.get(0), args.get(1)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 650cd2c81115c..180a8ff16f4eb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -42,7 +42,10 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -120,6 +123,7 @@ import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL; import static 
org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.PARTIAL; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.expression.Expressions.name; import static org.elasticsearch.xpack.ql.expression.Expressions.names; @@ -2925,6 +2929,174 @@ public void testPushSpatialIntersectsStringToSource() { } } + private record TestSpatialRelation(ShapeRelation relation, TestDataSource index, boolean literalRight, boolean canPushToSource) { + String function() { + return switch (relation) { + case INTERSECTS -> "ST_INTERSECTS"; + case WITHIN -> "ST_WITHIN"; + case CONTAINS -> "ST_CONTAINS"; + default -> throw new IllegalArgumentException("Unsupported relation: " + relation); + }; + } + + Class functionClass() { + return switch (relation) { + case INTERSECTS -> SpatialIntersects.class; + case WITHIN -> literalRight ? SpatialWithin.class : SpatialContains.class; + case CONTAINS -> literalRight ? SpatialContains.class : SpatialWithin.class; + default -> throw new IllegalArgumentException("Unsupported relation: " + relation); + }; + } + + ShapeRelation relationship() { + return switch (relation) { + case WITHIN -> literalRight ? ShapeRelation.WITHIN : ShapeRelation.CONTAINS; + case CONTAINS -> literalRight ? ShapeRelation.CONTAINS : ShapeRelation.WITHIN; + default -> relation; + }; + } + + DataType locationType() { + return index.index.name().endsWith("_web") ? CARTESIAN_POINT : GEO_POINT; + } + + String castFunction() { + return index.index.name().endsWith("_web") ? "TO_CARTESIANSHAPE" : "TO_GEOSHAPE"; + } + + String predicate() { + String field = "location"; + String literal = castFunction() + "(\"POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))\")"; + return literalRight ? 
function() + "(" + field + ", " + literal + ")" : function() + "(" + literal + ", " + field + ")"; + } + } + + public void testPushDownSpatialRelatesStringToSource() { + TestSpatialRelation[] tests = new TestSpatialRelation[] { + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, false, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) }; + for (TestSpatialRelation test : tests) { + var plan = this.physicalPlan("FROM " + test.index.index.name() + " | WHERE " + test.predicate(), test.index); + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var limit2 = as(fragment.fragment(), Limit.class); + var filter = as(limit2.child(), Filter.class); + assertThat(test.predicate(), filter.condition(), instanceOf(test.functionClass())); + + var optimized = optimizedPlan(plan); + var topLimit = as(optimized, LimitExec.class); + exchange = as(topLimit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var fieldExtract = as(project.child(), FieldExtractExec.class); + if (test.canPushToSource) { + var source = source(fieldExtract.child()); + // TODO: bring back 
SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name: " + test.predicate(), condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship: " + test.predicate(), condition.relation(), equalTo(test.relationship())); + assertThat("Geometry is Polygon: " + test.predicate(), condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length: " + test.predicate(), polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes: " + test.predicate(), polygon.getNumberOfHoles(), equalTo(0)); + } else { + // Currently CARTESIAN fields do not support lucene push-down for CONTAINS/WITHIN + var limitExec = as(fieldExtract.child(), LimitExec.class); + var filterExec = as(limitExec.child(), FilterExec.class); + var fieldExtractLocation = as(filterExec.child(), FieldExtractExec.class); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().size(), equalTo(1)); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().get(0).name(), equalTo("location")); + var source = source(fieldExtractLocation.child()); + assertThat(test.predicate(), source.query(), equalTo(null)); + } + } + } + + public void testPushDownSpatialRelatesStringToSourceAndUseDocValuesForCentroid() { + TestSpatialRelation[] tests = new TestSpatialRelation[] { + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, true, true), + new TestSpatialRelation(ShapeRelation.INTERSECTS, airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airports, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airports, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, 
airports, false, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.WITHIN, airportsWeb, false, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, true, true), + new TestSpatialRelation(ShapeRelation.CONTAINS, airportsWeb, false, true) }; + for (TestSpatialRelation test : tests) { + var centroidExpr = "centroid=ST_CENTROID(location), count=COUNT()"; + var plan = this.physicalPlan( + "FROM " + test.index.index.name() + " | WHERE " + test.predicate() + " | STATS " + centroidExpr, + test.index + ); + var limit = as(plan, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); + // Before optimization the aggregation does not use doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), false); + var exchange = as(agg.child(), ExchangeExec.class); + var fragment = as(exchange.child(), FragmentExec.class); + var fAgg = as(fragment.fragment(), Aggregate.class); + var filter = as(fAgg.child(), Filter.class); + assertThat(test.predicate(), filter.condition(), instanceOf(test.functionClass())); + + // Now verify that optimization re-writes the ExchangeExec and pushed down the filter into the Lucene query + var optimized = optimizedPlan(plan); + limit = as(optimized, LimitExec.class); + agg = as(limit.child(), AggregateExec.class); + // Above the exchange (in coordinator) the aggregation is not using doc-values + assertAggregation(agg, "count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), false); + exchange = as(agg.child(), ExchangeExec.class); + agg = as(exchange.child(), AggregateExec.class); + assertThat("Aggregation is PARTIAL", agg.getMode(), equalTo(PARTIAL)); + // below the exchange (in data node) the aggregation is using doc-values + assertAggregation(agg, 
"count", Count.class); + assertAggregation(agg, "centroid", SpatialCentroid.class, test.locationType(), true); + if (test.canPushToSource) { + var extract = as(agg.child(), FieldExtractExec.class); + assertTrue( + "Expect field attribute to be extracted as doc-values", + extract.attributesToExtract() + .stream() + .allMatch(attr -> extract.hasDocValuesAttribute(attr) && attr.dataType() == test.locationType()) + ); + var source = source(extract.child()); + // TODO: bring back SingleValueQuery once it can handle LeafShapeFieldData + // var condition = as(sv(source.query(), "location"), AbstractGeometryQueryBuilder.class); + var condition = as(source.query(), SpatialRelatesQuery.ShapeQueryBuilder.class); + assertThat("Geometry field name: " + test.predicate(), condition.fieldName(), equalTo("location")); + assertThat("Spatial relationship: " + test.predicate(), condition.relation(), equalTo(test.relationship())); + assertThat("Geometry is Polygon: " + test.predicate(), condition.shape().type(), equalTo(ShapeType.POLYGON)); + var polygon = as(condition.shape(), Polygon.class); + assertThat("Polygon shell length: " + test.predicate(), polygon.getPolygon().length(), equalTo(5)); + assertThat("Polygon holes: " + test.predicate(), polygon.getNumberOfHoles(), equalTo(0)); + } else { + // Currently CARTESIAN fields do not support lucene push-down for CONTAINS/WITHIN + var filterExec = as(agg.child(), FilterExec.class); + var fieldExtractLocation = as(filterExec.child(), FieldExtractExec.class); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().size(), equalTo(1)); + assertThat(test.predicate(), fieldExtractLocation.attributesToExtract().get(0).name(), equalTo("location")); + var source = source(fieldExtractLocation.child()); + assertThat(test.predicate(), source.query(), equalTo(null)); + + } + } + } + /** * Plan: * Plan: From 7f17effb4ff96008218fee529b8fd5f8ab5fcb83 Mon Sep 17 00:00:00 2001 From: Niels Bauman 
<33722607+nielsbauman@users.noreply.github.com> Date: Tue, 2 Apr 2024 13:03:35 +0200 Subject: [PATCH 67/69] Implement rollover for failure stores (#106715) Allows rolling over failure stores through the existing RolloverRequest by specifying `IndicesOptions.FailureStoreOptions` on that request. --- .../DataStreamGetWriteIndexTests.java | 17 +- ...etadataDataStreamRolloverServiceTests.java | 15 +- .../200_rollover_failure_store.yml | 116 ++++++++++ .../rest-api-spec/api/indices.rollover.json | 4 + .../org/elasticsearch/TransportVersions.java | 1 + .../indices/rollover/LazyRolloverAction.java | 3 +- .../rollover/MetadataRolloverService.java | 200 +++++++++++------- .../indices/rollover/RolloverRequest.java | 30 ++- .../rollover/TransportRolloverAction.java | 30 ++- .../cluster/metadata/DataStream.java | 71 ++++++- .../MetadataCreateDataStreamService.java | 31 ++- .../indices/RestRolloverIndexAction.java | 12 ++ ...adataRolloverServiceAutoShardingTests.java | 42 ++-- .../MetadataRolloverServiceTests.java | 137 ++++++++++-- .../rollover/RolloverRequestTests.java | 41 ++++ .../TransportRolloverActionTests.java | 3 +- .../cluster/metadata/DataStreamTests.java | 26 +++ .../MetadataCreateDataStreamServiceTests.java | 49 +++-- .../metadata/DataStreamTestHelper.java | 25 ++- 19 files changed, 685 insertions(+), 168 deletions(-) create mode 100644 modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java index b61cbdc837010..111a46bb7098b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamGetWriteIndexTests.java @@ -277,7 +277,8 @@ public void setup() throws Exception { 
createIndexService, indexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); } @@ -318,7 +319,19 @@ private MetadataRolloverService.RolloverResult rolloverOver(ClusterState state, MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); List> metConditions = Collections.singletonList(condition); CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); - return rolloverService.rolloverClusterState(state, name, null, createIndexRequest, metConditions, time, false, false, null, null); + return rolloverService.rolloverClusterState( + state, + name, + null, + createIndexRequest, + metConditions, + time, + false, + false, + null, + null, + false + ); } private Index getWriteIndex(ClusterState state, String name, String timestamp) { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index 2bfbeb8e37aaf..61f0efe89504d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -120,7 +120,8 @@ public void testRolloverClusterStateForDataStream() throws Exception { randomBoolean(), false, indexStats, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -220,7 +221,8 @@ public void testRolloverAndMigrateDataStream() throws Exception { randomBoolean(), false, null, - null + null, + false ); String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); @@ -313,7 +315,8 @@ public void testChangingIndexModeFromTimeSeriesToSomethingElseNoEffectOnExisting randomBoolean(), false, null, - null + null, + false ); String sourceIndexName = 
DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); @@ -379,7 +382,8 @@ public void testRolloverClusterStateWithBrokenOlderTsdbDataStream() throws Excep randomBoolean(), false, indexStats, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -460,7 +464,8 @@ public void testRolloverClusterStateWithBrokenTsdbDataStream() throws Exception randomBoolean(), false, indexStats, - null + null, + false ) ); assertThat(e.getMessage(), containsString("is overlapping with backing index")); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml new file mode 100644 index 0000000000000..82c757fc4af76 --- /dev/null +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -0,0 +1,116 @@ +--- +setup: + - skip: + version: " - 8.13.99" + reason: "data stream failure store rollover only supported in 8.14+" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [my-template] has index patterns [data-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template] will take precedence during new index creation" + indices.put_index_template: + name: my-template + body: + index_patterns: [data-*] + data_stream: + failure_store: true + template: + mappings: + properties: + '@timestamp': + type: date + count: + type: long + + - do: + indices.create_data_stream: + name: data-stream-for-rollover + +--- +"Roll over a data stream's failure store without conditions": + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + + - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - match: { 
new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } + - match: { rolled_over: true } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. + - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 2 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + +--- +"Roll over a data stream's failure store with conditions": + # index first document and wait for refresh + - do: + index: + index: data-stream-for-rollover + refresh: true + body: + '@timestamp': '2020-12-12' + count: 'invalid value' + + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + body: + conditions: + max_docs: 1 + + - match: { old_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - match: { new_index: "/\\.fs-data-stream-for-rollover-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000002/" } + - match: { rolled_over: true } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + # Both backing and failure indices use the same generation field. 
+ - match: { data_streams.0.generation: 2 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 2 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + +--- +"Don't roll over a data stream's failure store when conditions aren't met": + # rollover data stream to create new backing index + - do: + indices.rollover: + alias: "data-stream-for-rollover" + failure_store: true + body: + conditions: + max_docs: 1 + + - match: { rolled_over: false } + - match: { dry_run: false } + + - do: + indices.get_data_stream: + name: "*" + - match: { data_streams.0.name: data-stream-for-rollover } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - match: { data_streams.0.generation: 1 } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 47a1bee665506..e3c06ab080597 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -63,6 +63,10 @@ "type":"boolean", "default":"false", "description":"If set to true, the rollover action will only mark a data stream to signal that it needs to be rolled over at the next write. Only allowed on data streams." 
+ }, + "failure_store":{ + "type":"boolean", + "description":"If set to true, the rollover action will be applied on the failure store of the data stream." } }, "body":{ diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index d7bc07b3eb2b4..b0649c9429884 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -159,6 +159,7 @@ static TransportVersion def(int id) { public static final TransportVersion ESQL_ES_SOURCE_OPTIONS = def(8_618_00_0); public static final TransportVersion ADD_PERSISTENT_TASK_EXCEPTIONS = def(8_619_00_0); public static final TransportVersion ESQL_REDUCER_NODE_FRAGMENT = def(8_620_00_0); + public static final TransportVersion FAILURE_STORE_ROLLOVER = def(8_621_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java index 623186e052eb7..e8d63affcb8bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java @@ -98,7 +98,8 @@ protected void masterOperation( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + false ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index b03353a11793f..75852098170c6 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -26,11 +26,13 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataCreateDataStreamService; import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; import org.elasticsearch.cluster.metadata.MetadataIndexAliasesService; import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -76,6 +78,7 @@ public class MetadataRolloverService { private final MetadataIndexAliasesService indexAliasesService; private final SystemIndices systemIndices; private final WriteLoadForecaster writeLoadForecaster; + private final ClusterService clusterService; @Inject public MetadataRolloverService( @@ -83,13 +86,15 @@ public MetadataRolloverService( MetadataCreateIndexService createIndexService, MetadataIndexAliasesService indexAliasesService, SystemIndices systemIndices, - WriteLoadForecaster writeLoadForecaster + WriteLoadForecaster writeLoadForecaster, + ClusterService clusterService ) { this.threadPool = threadPool; this.createIndexService = createIndexService; this.indexAliasesService = indexAliasesService; this.systemIndices = systemIndices; this.writeLoadForecaster = writeLoadForecaster; + this.clusterService = clusterService; } public record RolloverResult(String rolloverIndexName, String sourceIndexName, ClusterState clusterState) 
{ @@ -116,9 +121,10 @@ public RolloverResult rolloverClusterState( boolean silent, boolean onlyValidate, @Nullable IndexMetadataStats sourceIndexStats, - @Nullable AutoShardingResult autoShardingResult + @Nullable AutoShardingResult autoShardingResult, + boolean isFailureStoreRollover ) throws Exception { - validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest); + validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest, isFailureStoreRollover); final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> rolloverAlias( @@ -141,7 +147,8 @@ public RolloverResult rolloverClusterState( silent, onlyValidate, sourceIndexStats, - autoShardingResult + autoShardingResult, + isFailureStoreRollover ); default -> // the validate method above prevents this case @@ -160,13 +167,19 @@ public static NameResolution resolveRolloverNames( ClusterState currentState, String rolloverTarget, String newIndexName, - CreateIndexRequest createIndexRequest + CreateIndexRequest createIndexRequest, + boolean isFailureStoreRollover ) { - validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest); + validate(currentState.metadata(), rolloverTarget, newIndexName, createIndexRequest, isFailureStoreRollover); final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> resolveAliasRolloverNames(currentState.metadata(), indexAbstraction, newIndexName); - case DATA_STREAM -> resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) indexAbstraction); + case DATA_STREAM -> { + if (isFailureStoreRollover) { + yield resolveDataStreamFailureStoreRolloverNames(currentState.metadata(), (DataStream) indexAbstraction); + } + yield resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) 
indexAbstraction); + } default -> // the validate method above prevents this case throw new IllegalStateException("unable to roll over type [" + indexAbstraction.getType().getDisplayName() + "]"); @@ -190,6 +203,17 @@ private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, return new NameResolution(originalWriteIndex.getIndex().getName(), null, dataStream.nextWriteIndexAndGeneration(metadata).v1()); } + private static NameResolution resolveDataStreamFailureStoreRolloverNames(Metadata metadata, DataStream dataStream) { + assert dataStream.getFailureStoreWriteIndex() != null : "Unable to roll over failure store with no failure store indices"; + + final IndexMetadata originalWriteIndex = metadata.index(dataStream.getFailureStoreWriteIndex()); + return new NameResolution( + originalWriteIndex.getIndex().getName(), + null, + dataStream.nextFailureStoreWriteIndexAndGeneration(metadata).v1() + ); + } + private RolloverResult rolloverAlias( ClusterState currentState, IndexAbstraction.Alias alias, @@ -252,7 +276,8 @@ private RolloverResult rolloverDataStream( boolean silent, boolean onlyValidate, @Nullable IndexMetadataStats sourceIndexStats, - @Nullable AutoShardingResult autoShardingResult + @Nullable AutoShardingResult autoShardingResult, + boolean isFailureStoreRollover ) throws Exception { if (SnapshotsService.snapshottingDataStreams(currentState, Collections.singleton(dataStream.getName())).isEmpty() == false) { @@ -280,8 +305,10 @@ private RolloverResult rolloverDataStream( templateV2 = systemDataStreamDescriptor.getComposableIndexTemplate(); } - final Index originalWriteIndex = dataStream.getWriteIndex(); - final Tuple nextIndexAndGeneration = dataStream.nextWriteIndexAndGeneration(currentState.metadata()); + final Index originalWriteIndex = isFailureStoreRollover ? dataStream.getFailureStoreWriteIndex() : dataStream.getWriteIndex(); + final Tuple nextIndexAndGeneration = isFailureStoreRollover + ? 
dataStream.nextFailureStoreWriteIndexAndGeneration(currentState.metadata()) + : dataStream.nextWriteIndexAndGeneration(currentState.metadata()); final String newWriteIndexName = nextIndexAndGeneration.v1(); final long newGeneration = nextIndexAndGeneration.v2(); MetadataCreateIndexService.validateIndexName(newWriteIndexName, currentState); // fails if the index already exists @@ -289,72 +316,87 @@ private RolloverResult rolloverDataStream( return new RolloverResult(newWriteIndexName, originalWriteIndex.getName(), currentState); } - DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null - ? dataStream.getAutoShardingEvent() - : switch (autoShardingResult.type()) { - case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { - if (dataStream.getAutoShardingEvent() != null) { - logger.info( - "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", - dataStreamName, - dataStream.getAutoShardingEvent() + ClusterState newState; + if (isFailureStoreRollover) { + newState = MetadataCreateDataStreamService.createFailureStoreIndex( + createIndexService, + "rollover_failure_store", + clusterService.getSettings(), + currentState, + now.toEpochMilli(), + dataStreamName, + templateV2, + newWriteIndexName, + (builder, indexMetadata) -> builder.put(dataStream.rolloverFailureStore(indexMetadata.getIndex(), newGeneration)) + ); + } else { + DataStreamAutoShardingEvent dataStreamAutoShardingEvent = autoShardingResult == null + ? 
dataStream.getAutoShardingEvent() + : switch (autoShardingResult.type()) { + case NO_CHANGE_REQUIRED, COOLDOWN_PREVENTED_INCREASE, COOLDOWN_PREVENTED_DECREASE -> { + if (dataStream.getAutoShardingEvent() != null) { + logger.info( + "Rolling over data stream [{}] using existing auto-sharding recommendation [{}]", + dataStreamName, + dataStream.getAutoShardingEvent() + ); + } + yield dataStream.getAutoShardingEvent(); + } + case INCREASE_SHARDS, DECREASE_SHARDS -> { + logger.info("Auto sharding data stream [{}] to [{}]", dataStreamName, autoShardingResult); + yield new DataStreamAutoShardingEvent( + dataStream.getWriteIndex().getName(), + autoShardingResult.targetNumberOfShards(), + now.toEpochMilli() ); } - yield dataStream.getAutoShardingEvent(); - } - case INCREASE_SHARDS, DECREASE_SHARDS -> { - logger.info("Auto sharding data stream [{}] to [{}]", dataStreamName, autoShardingResult); - yield new DataStreamAutoShardingEvent( - dataStream.getWriteIndex().getName(), - autoShardingResult.targetNumberOfShards(), - now.toEpochMilli() - ); - } - // data sharding might not be available due to the feature not being available/enabled or due to cluster level excludes - // being configured. the index template will dictate the number of shards as usual - case NOT_APPLICABLE -> { - logger.debug("auto sharding is not applicable for data stream [{}]", dataStreamName); - yield null; - } - }; - - // configure the number of shards using an auto sharding event (new, or existing) if we have one - if (dataStreamAutoShardingEvent != null) { - Settings settingsWithAutoSharding = Settings.builder() - .put(createIndexRequest.settings()) - .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), dataStreamAutoShardingEvent.targetNumberOfShards()) - .build(); - createIndexRequest.settings(settingsWithAutoSharding); - } + // data sharding might not be available due to the feature not being available/enabled or due to cluster level excludes + // being configured. 
the index template will dictate the number of shards as usual + case NOT_APPLICABLE -> { + logger.debug("auto sharding is not applicable for data stream [{}]", dataStreamName); + yield null; + } + }; + + // configure the number of shards using an auto sharding event (new, or existing) if we have one + if (dataStreamAutoShardingEvent != null) { + Settings settingsWithAutoSharding = Settings.builder() + .put(createIndexRequest.settings()) + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), dataStreamAutoShardingEvent.targetNumberOfShards()) + .build(); + createIndexRequest.settings(settingsWithAutoSharding); + } - var createIndexClusterStateRequest = prepareDataStreamCreateIndexRequest( - dataStreamName, - newWriteIndexName, - createIndexRequest, - systemDataStreamDescriptor, - now - ); - createIndexClusterStateRequest.setMatchingTemplate(templateV2); - assert createIndexClusterStateRequest.performReroute() == false - : "rerouteCompletionIsNotRequired() assumes reroute is not called by underlying service"; + var createIndexClusterStateRequest = prepareDataStreamCreateIndexRequest( + dataStreamName, + newWriteIndexName, + createIndexRequest, + systemDataStreamDescriptor, + now + ); + createIndexClusterStateRequest.setMatchingTemplate(templateV2); + assert createIndexClusterStateRequest.performReroute() == false + : "rerouteCompletionIsNotRequired() assumes reroute is not called by underlying service"; - ClusterState newState = createIndexService.applyCreateIndexRequest( - currentState, - createIndexClusterStateRequest, - silent, - (builder, indexMetadata) -> { - downgradeBrokenTsdbBackingIndices(dataStream, builder); - builder.put( - dataStream.rollover( - indexMetadata.getIndex(), - newGeneration, - metadata.isTimeSeriesTemplate(templateV2), - dataStreamAutoShardingEvent - ) - ); - }, - rerouteCompletionIsNotRequired() - ); + newState = createIndexService.applyCreateIndexRequest( + currentState, + createIndexClusterStateRequest, + silent, + (builder, 
indexMetadata) -> { + downgradeBrokenTsdbBackingIndices(dataStream, builder); + builder.put( + dataStream.rollover( + indexMetadata.getIndex(), + newGeneration, + metadata.isTimeSeriesTemplate(templateV2), + dataStreamAutoShardingEvent + ) + ); + }, + rerouteCompletionIsNotRequired() + ); + } RolloverInfo rolloverInfo = new RolloverInfo(dataStreamName, metConditions, threadPool.absoluteTimeInMillis()); @@ -561,7 +603,13 @@ static void checkNoDuplicatedAliasInIndexTemplate( } } - static void validate(Metadata metadata, String rolloverTarget, String newIndexName, CreateIndexRequest request) { + static void validate( + Metadata metadata, + String rolloverTarget, + String newIndexName, + CreateIndexRequest request, + boolean isFailureStoreRollover + ) { final IndexAbstraction indexAbstraction = metadata.getIndicesLookup().get(rolloverTarget); if (indexAbstraction == null) { throw new IllegalArgumentException("rollover target [" + rolloverTarget + "] does not exist"); @@ -591,6 +639,12 @@ static void validate(Metadata metadata, String rolloverTarget, String newIndexNa "aliases, mappings, and index settings may not be specified when rolling over a data stream" ); } + var dataStream = (DataStream) indexAbstraction; + if (isFailureStoreRollover && dataStream.isFailureStore() == false) { + throw new IllegalArgumentException( + "unable to roll over failure store because [" + indexAbstraction.getName() + "] does not have the failure store enabled" + ); + } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 06046a066d211..28ef2f644af04 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -101,6 +101,7 @@ public class RolloverRequest extends AcknowledgedRequest implem private 
RolloverConditions conditions = new RolloverConditions(); // the index name "_na_" is never read back, what matters are settings, mappings and aliases private CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + private IndicesOptions indicesOptions = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); public RolloverRequest(StreamInput in) throws IOException { super(in); @@ -114,6 +115,9 @@ public RolloverRequest(StreamInput in) throws IOException { } else { lazy = false; } + if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { + indicesOptions = IndicesOptions.readIndicesOptions(in); + } } RolloverRequest() {} @@ -138,6 +142,18 @@ public ActionRequestValidationException validate() { ); } + var failureStoreOptions = indicesOptions.failureStoreOptions(); + if (failureStoreOptions.includeRegularIndices() && failureStoreOptions.includeFailureIndices()) { + validationException = addValidationError( + "rollover cannot be applied to both regular and failure indices at the same time", + validationException + ); + } + + if (failureStoreOptions.includeFailureIndices() && lazy) { + validationException = addValidationError("lazily rolling over a failure store is currently not supported", validationException); + } + return validationException; } @@ -152,6 +168,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { out.writeBoolean(lazy); } + if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { + indicesOptions.writeIndicesOptions(out); + } } @Override @@ -161,7 +180,11 @@ public String[] indices() { @Override public IndicesOptions indicesOptions() { - return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + return indicesOptions; + } + + public void setIndicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; } @Override @@ -282,11 +305,12 @@ public boolean 
equals(Object o) { && Objects.equals(rolloverTarget, that.rolloverTarget) && Objects.equals(newIndexName, that.newIndexName) && Objects.equals(conditions, that.conditions) - && Objects.equals(createIndexRequest, that.createIndexRequest); + && Objects.equals(createIndexRequest, that.createIndexRequest) + && Objects.equals(indicesOptions, that.indicesOptions); } @Override public int hashCode() { - return Objects.hash(rolloverTarget, newIndexName, dryRun, conditions, createIndexRequest, lazy); + return Objects.hash(rolloverTarget, newIndexName, dryRun, conditions, createIndexRequest, lazy, indicesOptions); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index bd507ee9054f1..2a92496d5695a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -141,11 +141,14 @@ public TransportRolloverAction( @Override protected ClusterBlockException checkBlock(RolloverRequest request, ClusterState state) { - IndicesOptions indicesOptions = IndicesOptions.fromOptions( - true, - true, - request.indicesOptions().expandWildcardsOpen(), - request.indicesOptions().expandWildcardsClosed() + final var indicesOptions = new IndicesOptions( + IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS, + IndicesOptions.WildcardOptions.builder() + .matchOpen(request.indicesOptions().expandWildcardsOpen()) + .matchClosed(request.indicesOptions().expandWildcardsClosed()) + .build(), + IndicesOptions.GatekeeperOptions.DEFAULT, + request.indicesOptions().failureStoreOptions() ); return state.blocks() @@ -170,7 +173,8 @@ protected void masterOperation( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - 
rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); @@ -225,9 +229,15 @@ protected void masterOperation( return; } + final var statsIndicesOptions = new IndicesOptions( + IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS, + IndicesOptions.WildcardOptions.builder().matchClosed(true).allowEmptyExpressions(false).build(), + IndicesOptions.GatekeeperOptions.DEFAULT, + rolloverRequest.indicesOptions().failureStoreOptions() + ); IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices(rolloverRequest.getRolloverTarget()) .clear() - .indicesOptions(IndicesOptions.fromOptions(true, false, true, true)) + .indicesOptions(statsIndicesOptions) .docs(true) .indexing(true); statsRequest.setParentTask(clusterService.localNode().getId(), task.getId()); @@ -444,7 +454,8 @@ public ClusterState executeTask( currentState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), - rolloverRequest.getCreateIndexRequest() + rolloverRequest.getCreateIndexRequest(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); // Re-evaluate the conditions, now with our final source index name @@ -494,7 +505,8 @@ public ClusterState executeTask( false, false, sourceIndexStats, - rolloverTask.autoShardingResult() + rolloverTask.autoShardingResult(), + rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() ); results.add(rolloverResult); logger.trace("rollover result [{}]", rolloverResult); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 776fb9fd87740..f1a508b803d4d 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -22,6 +22,7 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -511,12 +512,47 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time } /** - * Performs a dummy rollover on a {@code DataStream} instance and returns the tuple of the next write index name and next generation - * that this {@code DataStream} should roll over to using {@link #rollover(Index, long, boolean, DataStreamAutoShardingEvent)}. + * Performs a rollover on the failure store of a {@code DataStream} instance and returns a new instance containing + * the updated list of failure store indices and incremented generation. * - * @param clusterMetadata Cluster metadata + * @param writeIndex new failure store write index + * @param generation new generation + * @return new {@code DataStream} instance with the rollover operation applied + */ + public DataStream rolloverFailureStore(Index writeIndex, long generation) { + ensureNotReplicated(); + + return unsafeRolloverFailureStore(writeIndex, generation); + } + + /** + * Like {@link #rolloverFailureStore(Index, long)}, but does no validation, use with care only. 
+ */ + public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) { + List failureIndices = new ArrayList<>(this.failureIndices); + failureIndices.add(writeIndex); + return new DataStream( + name, + indices, + generation, + metadata, + hidden, + false, + system, + allowCustomRouting, + indexMode, + lifecycle, + failureStore, + failureIndices, + autoShardingEvent + ); + } + + /** + * Generates the next write index name and generation to be used for rolling over this data stream. * - * @return new {@code DataStream} instance with the dummy rollover operation applied + * @param clusterMetadata Cluster metadata + * @return tuple of the next write index name and next generation. */ public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) { ensureNotReplicated(); @@ -527,11 +563,36 @@ public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) * Like {@link #nextWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. */ public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata) { + return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultBackingIndexName); + } + + /** + * Generates the next write index name and generation to be used for rolling over the failure store of this data stream. + * + * @param clusterMetadata Cluster metadata + * @return tuple of the next failure store write index name and next generation. + */ + public Tuple nextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + ensureNotReplicated(); + return unsafeNextFailureStoreWriteIndexAndGeneration(clusterMetadata); + } + + /** + * Like {@link #nextFailureStoreWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. 
+ */ + public Tuple unsafeNextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultFailureStoreName); + } + + private Tuple generateNextWriteIndexAndGeneration( + Metadata clusterMetadata, + TriFunction nameGenerator + ) { String newWriteIndexName; long generation = this.generation; long currentTimeMillis = timeProvider.getAsLong(); do { - newWriteIndexName = DataStream.getDefaultBackingIndexName(getName(), ++generation, currentTimeMillis); + newWriteIndexName = nameGenerator.apply(getName(), ++generation, currentTimeMillis); } while (clusterMetadata.hasIndexAbstraction(newWriteIndexName)); return Tuple.tuple(newWriteIndexName, generation); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index fd67a8ac7e230..6c933ba1480df 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -46,6 +47,7 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.isDataStreamsLifecycleOnlyMode; @@ -265,6 +267,10 @@ static ClusterState createDataStream( final ComposableIndexTemplate template = isSystem ? 
systemDataStreamDescriptor.getComposableIndexTemplate() : lookupTemplateForDataStream(dataStreamName, currentState.metadata()); + // The initial backing index and the initial failure store index will have the same initial generation. + // This is not a problem as both have different prefixes (`.ds-` vs `.fs-`) and both will be using the same `generation` field + // when rolling over in the future. + final long initialGeneration = 1; // If we need to create a failure store, do so first. Do not reroute during the creation since we will do // that as part of creating the backing index if required. @@ -273,21 +279,23 @@ static ClusterState createDataStream( if (isSystem) { throw new IllegalArgumentException("Failure stores are not supported on system data streams"); } - String failureStoreIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, 1, request.getStartTime()); + String failureStoreIndexName = DataStream.getDefaultFailureStoreName(dataStreamName, initialGeneration, request.getStartTime()); currentState = createFailureStoreIndex( metadataCreateIndexService, + "initialize_data_stream", settings, currentState, - request, + request.getStartTime(), dataStreamName, template, - failureStoreIndexName + failureStoreIndexName, + null ); failureStoreIndex = currentState.metadata().index(failureStoreIndexName); } if (writeIndex == null) { - String firstBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, 1, request.getStartTime()); + String firstBackingIndexName = DataStream.getDefaultBackingIndexName(dataStreamName, initialGeneration, request.getStartTime()); currentState = createBackingIndex( metadataCreateIndexService, currentState, @@ -322,7 +330,7 @@ static ClusterState createDataStream( DataStream newDataStream = new DataStream( dataStreamName, dsBackingIndices, - 1L, + initialGeneration, template.metadata() != null ? 
Map.copyOf(template.metadata()) : null, hidden, false, @@ -399,14 +407,16 @@ private static ClusterState createBackingIndex( return currentState; } - private static ClusterState createFailureStoreIndex( + public static ClusterState createFailureStoreIndex( MetadataCreateIndexService metadataCreateIndexService, + String cause, Settings settings, ClusterState currentState, - CreateDataStreamClusterStateUpdateRequest request, + long nameResolvedInstant, String dataStreamName, ComposableIndexTemplate template, - String failureStoreIndexName + String failureStoreIndexName, + @Nullable BiConsumer metadataTransformer ) throws Exception { if (DataStream.isFailureStoreEnabled() == false) { return currentState; @@ -423,11 +433,11 @@ private static ClusterState createFailureStoreIndex( } CreateIndexClusterStateUpdateRequest createIndexRequest = new CreateIndexClusterStateUpdateRequest( - "initialize_data_stream", + cause, failureStoreIndexName, failureStoreIndexName ).dataStreamName(dataStreamName) - .nameResolvedInstant(request.getStartTime()) + .nameResolvedInstant(nameResolvedInstant) .performReroute(false) .setMatchingTemplate(template) .settings(indexSettings); @@ -437,6 +447,7 @@ private static ClusterState createFailureStoreIndex( currentState, createIndexRequest, false, + metadataTransformer, AllocationActionListener.rerouteCompletionIsNotRequired() ); } catch (ResourceAlreadyExistsException e) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index b22c79230ef3c..a0796c0f95639 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -10,7 +10,9 @@ import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import 
org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.BaseRestHandler; @@ -51,6 +53,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC rolloverIndexRequest.lazy(request.paramAsBoolean("lazy", false)); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); + if (DataStream.isFailureStoreEnabled()) { + boolean failureStore = request.paramAsBoolean("failure_store", false); + if (failureStore) { + rolloverIndexRequest.setIndicesOptions( + IndicesOptions.builder(rolloverIndexRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); + } + } rolloverIndexRequest.getCreateIndexRequest() .waitForActiveShards(ActiveShardCount.parseString(request.param("wait_for_active_shards"))); return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).admin() diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java index fd21e0c27099e..906b2434f7d39 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceAutoShardingTests.java @@ -107,7 +107,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new 
AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33) + new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 5); } @@ -126,7 +127,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -152,7 +154,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -182,7 +185,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -198,7 +202,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -215,7 
+220,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33) + new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -231,7 +237,8 @@ public void testRolloverDataStreamWithoutExistingAutosharding() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null) + new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -303,7 +310,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33) + new AutoShardingResult(INCREASE_SHARDS, 3, 5, TimeValue.ZERO, 64.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 5); } @@ -322,7 +330,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -348,7 +357,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33) + new AutoShardingResult(DECREASE_SHARDS, 3, 1, TimeValue.ZERO, 0.33), + false ); assertRolloverResult( dataStream, @@ -371,7 +381,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new 
AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_INCREASE, 3, 5, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -387,7 +398,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33) + new AutoShardingResult(COOLDOWN_PREVENTED_DECREASE, 3, 1, TimeValue.timeValueMinutes(10), 64.33), + false ); // the expected number of shards remains 3 for the data stream due to the remaining cooldown assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), List.of(), 3); @@ -404,7 +416,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33) + new AutoShardingResult(NO_CHANGE_REQUIRED, 3, 3, TimeValue.ZERO, 2.33), + false ); assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 3); } @@ -420,7 +433,8 @@ public void testRolloverDataStreamWithExistingAutoShardEvent() throws Exception randomBoolean(), false, null, - new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null) + new AutoShardingResult(NOT_APPLICABLE, 1, 1, TimeValue.MAX_VALUE, null), + false ); // if the auto sharding is not applicable we just use whatever's in the index template (1 shard in this case) assertRolloverResult(dataStream, rolloverResult, before, testThreadPool.absoluteTimeInMillis(), metConditions, 1); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 23905c9445d18..d386eb40aea43 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.IndicesOptions.FailureStoreOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasAction; @@ -28,6 +29,7 @@ import org.elasticsearch.cluster.metadata.MetadataIndexAliasesService; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.routing.allocation.WriteLoadForecaster; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -199,26 +201,26 @@ public void testAliasValidation() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, aliasWithNoWriteIndex, randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, aliasWithNoWriteIndex, randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("rollover target [" + aliasWithNoWriteIndex + "] does not point to a write index")); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomFrom(index1, index2), randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, randomFrom(index1, index2), randomAlphaOfLength(5), req, false) 
); assertThat(exception.getMessage(), equalTo("rollover target is a [concrete index] but one of [alias,data_stream] was expected")); final String aliasName = randomAlphaOfLength(5); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, aliasName, randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, aliasName, randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("rollover target [" + aliasName + "] does not exist")); - MetadataRolloverService.validate(metadata, aliasWithWriteIndex, randomAlphaOfLength(5), req); + MetadataRolloverService.validate(metadata, aliasWithWriteIndex, randomAlphaOfLength(5), req, false); } public void testDataStreamValidation() throws IOException { Metadata.Builder md = Metadata.builder(); - DataStream randomDataStream = DataStreamTestHelper.randomInstance(); + DataStream randomDataStream = DataStreamTestHelper.randomInstance(false); for (Index index : randomDataStream.getIndices()) { md.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index)); } @@ -226,18 +228,18 @@ public void testDataStreamValidation() throws IOException { Metadata metadata = md.build(); CreateIndexRequest req = new CreateIndexRequest(); - MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req); + MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req, false); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), randomAlphaOfLength(5), req) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), randomAlphaOfLength(5), req, false) ); assertThat(exception.getMessage(), equalTo("new index name may not be specified when rolling over a data stream")); CreateIndexRequest aliasReq = new CreateIndexRequest().alias(new Alias("no_aliases_permitted")); exception = expectThrows( 
IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, aliasReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, aliasReq, false) ); assertThat( exception.getMessage(), @@ -248,7 +250,7 @@ public void testDataStreamValidation() throws IOException { CreateIndexRequest mappingReq = new CreateIndexRequest().mapping(mapping); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, mappingReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, mappingReq, false) ); assertThat( exception.getMessage(), @@ -258,12 +260,23 @@ public void testDataStreamValidation() throws IOException { CreateIndexRequest settingReq = new CreateIndexRequest().settings(Settings.builder().put("foo", "bar")); exception = expectThrows( IllegalArgumentException.class, - () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, settingReq) + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, settingReq, false) ); assertThat( exception.getMessage(), equalTo("aliases, mappings, and index settings may not be specified when rolling over a data stream") ); + + exception = expectThrows( + IllegalArgumentException.class, + () -> MetadataRolloverService.validate(metadata, randomDataStream.getName(), null, req, true) + ); + assertThat( + exception.getMessage(), + equalTo( + "unable to roll over failure store because [" + randomDataStream.getName() + "] does not have the failure store enabled" + ) + ); } public void testGenerateRolloverIndexName() { @@ -548,7 +561,8 @@ public void testRolloverClusterState() throws Exception { randomBoolean(), false, null, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); @@ -606,6 +620,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { List> 
metConditions = Collections.singletonList(condition); CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + String sourceIndexName = dataStream.getWriteIndex().getName(); long before = testThreadPool.absoluteTimeInMillis(); MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( clusterState, @@ -617,11 +632,11 @@ public void testRolloverClusterStateForDataStream() throws Exception { randomBoolean(), false, null, - null + null, + false ); long after = testThreadPool.absoluteTimeInMillis(); - String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); assertEquals(newIndexName, rolloverResult.rolloverIndexName()); @@ -646,19 +661,101 @@ public void testRolloverClusterStateForDataStream() throws Exception { } } + public void testRolloverClusterStateForDataStreamFailureStore() throws Exception { + final DataStream dataStream = DataStreamTestHelper.randomInstance(true) + // ensure no replicate data stream + .promoteDataStream(); + ComposableIndexTemplate template = ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStream.getName() + "*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + 
MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(), + xContentRegistry() + ); + + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + long before = testThreadPool.absoluteTimeInMillis(); + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + Instant.now(), + randomBoolean(), + false, + null, + null, + true + ); + long after = testThreadPool.absoluteTimeInMillis(); + + var epochMillis = System.currentTimeMillis(); + String sourceIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration(), epochMillis); + String newIndexName = DataStream.getDefaultFailureStoreName(dataStream.getName(), dataStream.getGeneration() + 1, epochMillis); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size()); + IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName); + + var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName()); + assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM)); + assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size())); + assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1)); + assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); + assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex())); + 
assertThat(ds.getFailureStoreWriteIndex(), equalTo(rolloverIndexMetadata.getIndex())); + + RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName()); + assertThat(info.getTime(), lessThanOrEqualTo(after)); + assertThat(info.getTime(), greaterThanOrEqualTo(before)); + assertThat(info.getMetConditions(), hasSize(1)); + assertThat(info.getMetConditions().get(0).value(), equalTo(condition.value())); + } finally { + testThreadPool.shutdown(); + } + } + public void testValidation() throws Exception { final String rolloverTarget; final String sourceIndexName; final String defaultRolloverIndexName; final boolean useDataStream = randomBoolean(); final Metadata.Builder builder = Metadata.builder(); + var failureStoreOptions = FailureStoreOptions.DEFAULT; if (useDataStream) { DataStream dataStream = DataStreamTestHelper.randomInstance() // ensure no replicate data stream .promoteDataStream(); rolloverTarget = dataStream.getName(); - sourceIndexName = dataStream.getIndices().get(dataStream.getIndices().size() - 1).getName(); - defaultRolloverIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + if (dataStream.isFailureStore() && randomBoolean()) { + failureStoreOptions = new FailureStoreOptions(false, true); + sourceIndexName = dataStream.getFailureStoreWriteIndex().getName(); + defaultRolloverIndexName = DataStream.getDefaultFailureStoreName( + dataStream.getName(), + dataStream.getGeneration() + 1, + System.currentTimeMillis() + ); + } else { + sourceIndexName = dataStream.getIndices().get(dataStream.getIndices().size() - 1).getName(); + defaultRolloverIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + } ComposableIndexTemplate template = ComposableIndexTemplate.builder() .indexPatterns(List.of(dataStream.getName() + "*")) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) @@ -684,12 +781,14 @@ public 
void testValidation() throws Exception { MetadataCreateIndexService createIndexService = mock(MetadataCreateIndexService.class); MetadataIndexAliasesService metadataIndexAliasesService = mock(MetadataIndexAliasesService.class); + ClusterService clusterService = mock(ClusterService.class); MetadataRolloverService rolloverService = new MetadataRolloverService( null, createIndexService, metadataIndexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); String newIndexName = useDataStream == false && randomBoolean() ? "logs-index-9" : null; @@ -704,7 +803,8 @@ public void testValidation() throws Exception { randomBoolean(), true, null, - null + null, + failureStoreOptions.includeFailureIndices() ); newIndexName = newIndexName == null ? defaultRolloverIndexName : newIndexName; @@ -746,7 +846,8 @@ public void testRolloverClusterStateForDataStreamNoTemplate() throws Exception { false, randomBoolean(), null, - null + null, + false ) ); assertThat(e.getMessage(), equalTo("no matching index template found for data stream [" + dataStream.getName() + "]")); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index b34045b50654c..b6c0b5047ab77 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -175,6 +176,12 @@ public void 
testSerialize() throws Exception { .build() ); originalRequest.lazy(randomBoolean()); + originalRequest.setIndicesOptions( + IndicesOptions.builder(originalRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(randomBoolean(), randomBoolean())) + .build() + ); + try (BytesStreamOutput out = new BytesStreamOutput()) { originalRequest.writeTo(out); BytesReference bytes = out.bytes(); @@ -183,6 +190,10 @@ public void testSerialize() throws Exception { assertThat(cloneRequest.getNewIndexName(), equalTo(originalRequest.getNewIndexName())); assertThat(cloneRequest.getRolloverTarget(), equalTo(originalRequest.getRolloverTarget())); assertThat(cloneRequest.isLazy(), equalTo(originalRequest.isLazy())); + assertThat( + cloneRequest.indicesOptions().failureStoreOptions(), + equalTo(originalRequest.indicesOptions().failureStoreOptions()) + ); for (Map.Entry> entry : cloneRequest.getConditions().getConditions().entrySet()) { Condition condition = originalRequest.getConditions().getConditions().get(entry.getKey()); // here we compare the string representation as there is some information loss when serializing @@ -247,6 +258,36 @@ public void testValidation() { ActionRequestValidationException validationException = rolloverRequest.validate(); assertNull(validationException); } + + { + RolloverRequest rolloverRequest = new RolloverRequest("alias-index", "new-index-name"); + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(true, true)) + .build() + ); + ActionRequestValidationException validationException = rolloverRequest.validate(); + assertNotNull(validationException); + assertEquals(1, validationException.validationErrors().size()); + assertEquals( + "rollover cannot be applied to both regular and failure indices at the same time", + validationException.validationErrors().get(0) + ); + } + + { + RolloverRequest rolloverRequest = new 
RolloverRequest("alias-index", "new-index-name"); + rolloverRequest.setIndicesOptions( + IndicesOptions.builder(rolloverRequest.indicesOptions()) + .failureStoreOptions(new IndicesOptions.FailureStoreOptions(false, true)) + .build() + ); + rolloverRequest.lazy(true); + ActionRequestValidationException validationException = rolloverRequest.validate(); + assertNotNull(validationException); + assertEquals(1, validationException.validationErrors().size()); + assertEquals("lazily rolling over a failure store is currently not supported", validationException.validationErrors().get(0)); + } } public void testParsingWithType() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index db156f983220e..9faa6c4ba2d3f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -110,7 +110,8 @@ public class TransportRolloverActionTests extends ESTestCase { mockCreateIndexService, mdIndexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + mockClusterService ); final DataStreamAutoShardingService dataStreamAutoShardingService = new DataStreamAutoShardingService( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 56d36d8fb18b0..3187a3e391691 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -253,6 +253,20 @@ public void testRolloverDowngradeToRegularDataStream() { assertThat(rolledDs.getIndexMode(), nullValue()); } + public void 
testRolloverFailureStore() { + DataStream ds = DataStreamTestHelper.randomInstance(true).promoteDataStream(); + Tuple newCoordinates = ds.nextFailureStoreWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + final DataStream rolledDs = ds.rolloverFailureStore(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); + assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertTrue(rolledDs.getFailureIndices().containsAll(ds.getFailureIndices())); + assertTrue(rolledDs.getFailureIndices().contains(rolledDs.getFailureStoreWriteIndex())); + } + public void testRemoveBackingIndex() { int numBackingIndices = randomIntBetween(2, 32); int indexToRemove = randomIntBetween(1, numBackingIndices - 1); @@ -508,6 +522,18 @@ public void testDefaultBackingIndexName() { assertThat(defaultBackingIndexName, equalTo(expectedBackingIndexName)); } + public void testDefaultFailureStoreName() { + // this test does little more than flag that changing the default naming convention for failure store indices + // will also require changing a lot of hard-coded values in REST tests and docs + long failureStoreIndexNum = randomLongBetween(1, 1000001); + String dataStreamName = randomAlphaOfLength(6); + long epochMillis = randomLongBetween(1580536800000L, 1583042400000L); + String dateString = DataStream.DATE_FORMATTER.formatMillis(epochMillis); + String defaultFailureStoreName = DataStream.getDefaultFailureStoreName(dataStreamName, failureStoreIndexNum, epochMillis); + String expectedFailureStoreName = Strings.format(".fs-%s-%s-%06d", dataStreamName, dateString, failureStoreIndexNum); + 
assertThat(defaultFailureStoreName, equalTo(expectedFailureStoreName)); + } + public void testReplaceBackingIndex() { int numBackingIndices = randomIntBetween(2, 32); int indexToReplace = randomIntBetween(1, numBackingIndices - 1) - 1; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index ea79bc8f13765..3df777c1f0eef 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.SystemIndices.Feature; import org.elasticsearch.test.ESTestCase; +import org.mockito.stubbing.Answer; import java.util.ArrayList; import java.util.HashMap; @@ -465,29 +466,33 @@ public static ClusterState createDataStream(final String dataStreamName) throws private static MetadataCreateIndexService getMetadataCreateIndexService() throws Exception { MetadataCreateIndexService s = mock(MetadataCreateIndexService.class); when(s.getSystemIndices()).thenReturn(getSystemIndices()); - when(s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any())) - .thenAnswer(mockInvocation -> { - ClusterState currentState = (ClusterState) mockInvocation.getArguments()[0]; - CreateIndexClusterStateUpdateRequest request = (CreateIndexClusterStateUpdateRequest) mockInvocation.getArguments()[1]; + Answer objectAnswer = mockInvocation -> { + ClusterState currentState = (ClusterState) mockInvocation.getArguments()[0]; + CreateIndexClusterStateUpdateRequest request = (CreateIndexClusterStateUpdateRequest) mockInvocation.getArguments()[1]; - Metadata.Builder b = Metadata.builder(currentState.metadata()) - .put( - IndexMetadata.builder(request.index()) 
- .settings( - Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) - .put(request.settings()) - .build() - ) - .putMapping(generateMapping("@timestamp")) - .system(getSystemIndices().isSystemName(request.index())) - .numberOfShards(1) - .numberOfReplicas(1) - .build(), - false - ); - return ClusterState.builder(currentState).metadata(b.build()).build(); - }); + Metadata.Builder b = Metadata.builder(currentState.metadata()) + .put( + IndexMetadata.builder(request.index()) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(request.settings()) + .build() + ) + .putMapping(generateMapping("@timestamp")) + .system(getSystemIndices().isSystemName(request.index())) + .numberOfShards(1) + .numberOfReplicas(1) + .build(), + false + ); + return ClusterState.builder(currentState).metadata(b.build()).build(); + }; + when(s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any())) + .thenAnswer(objectAnswer); + when( + s.applyCreateIndexRequest(any(ClusterState.class), any(CreateIndexClusterStateUpdateRequest.class), anyBoolean(), any(), any()) + ).thenAnswer(objectAnswer); return s; } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 4cc019a300e8b..2980b8a48636a 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -315,16 +315,24 @@ public static DataStream randomInstance() { return randomInstance(System::currentTimeMillis); } + public static DataStream randomInstance(boolean failureStore) { + return randomInstance(System::currentTimeMillis, failureStore); + } + public static DataStream randomInstance(String name) { - return 
randomInstance(name, System::currentTimeMillis); + return randomInstance(name, System::currentTimeMillis, randomBoolean()); } public static DataStream randomInstance(LongSupplier timeProvider) { + return randomInstance(timeProvider, randomBoolean()); + } + + public static DataStream randomInstance(LongSupplier timeProvider, boolean failureStore) { String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); - return randomInstance(dataStreamName, timeProvider); + return randomInstance(dataStreamName, timeProvider, failureStore); } - public static DataStream randomInstance(String dataStreamName, LongSupplier timeProvider) { + public static DataStream randomInstance(String dataStreamName, LongSupplier timeProvider, boolean failureStore) { List indices = randomIndexInstances(); long generation = indices.size() + ESTestCase.randomLongBetween(1, 128); indices.add(new Index(getDefaultBackingIndexName(dataStreamName, generation), UUIDs.randomBase64UUID(LuceneTestCase.random()))); @@ -333,9 +341,15 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time metadata = Map.of("key", "value"); } List failureIndices = List.of(); - boolean failureStore = randomBoolean(); + generation = generation + ESTestCase.randomLongBetween(1, 128); if (failureStore) { failureIndices = randomNonEmptyIndexInstances(); + failureIndices.add( + new Index( + getDefaultFailureStoreName(dataStreamName, generation, System.currentTimeMillis()), + UUIDs.randomBase64UUID(LuceneTestCase.random()) + ) + ); } return new DataStream( @@ -679,7 +693,8 @@ public static MetadataRolloverService getMetadataRolloverService( createIndexService, indexAliasesService, EmptySystemIndices.INSTANCE, - WriteLoadForecaster.DEFAULT + WriteLoadForecaster.DEFAULT, + clusterService ); } From e56dcee07832c75eca32e5d044b5a24f5ab0161f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Tue, 2 Apr 2024 13:34:46 +0200 Subject: [PATCH 68/69] [Inference API] Add completion task type docs (#106876) 
--- .../inference/post-inference.asciidoc | 37 ++++++++++++++++++- .../inference/put-inference.asciidoc | 34 ++++++++++++++++- 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index e4cbd26904271..08a58d7789e33 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -33,8 +33,8 @@ own model, use the <>. ==== {api-description-title} The perform {infer} API enables you to use {ml} models to perform specific tasks -on data that you provide as an input. The API returns a response with the -resutls of the tasks. The {infer} model you use can perform one specific task +on data that you provide as an input. The API returns a response with the +results of the tasks. The {infer} model you use can perform one specific task that has been defined when the model was created with the <>. @@ -60,6 +60,10 @@ The type of {infer} task that the model performs. (Required, array of strings) The text on which you want to perform the {infer} task. `input` can be a single string or an array. +[NOTE] +==== +Inference endpoints for the `completion` task type currently only support a single string as input. +==== [discrete] @@ -108,3 +112,32 @@ The API returns the following response: } ------------------------------------------------------------ // NOTCONSOLE + + +The next example performs a completion on the example question. + + +[source,console] +------------------------------------------------------------ +POST _inference/completion/openai_chat_completions +{ + "input": "What is Elastic?" 
+} +------------------------------------------------------------ +// TEST[skip:TBD] + + +The API returns the following response: + + +[source,console-result] +------------------------------------------------------------ +{ + "completion": [ + { + "result": "Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management." + } + ] +} +------------------------------------------------------------ +// NOTCONSOLE diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index c0b9d508e13c3..110ec9d6fa98c 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -58,7 +58,8 @@ The unique identifier of the {infer} endpoint. (Required, string) The type of the {infer} task that the model will perform. Available task types: * `sparse_embedding`, -* `text_embedding`. +* `text_embedding`, +* `completion` [discrete] @@ -101,7 +102,7 @@ the same name and the updated API key. (Optional, string) Specifies the types of embeddings you want to get back. Defaults to `float`. Valid values are: - * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). + * `byte`: use it for signed int8 embeddings (this is a synonym of `int8`). * `float`: use it for the default float embeddings. * `int8`: use it for signed int8 embeddings. @@ -232,6 +233,18 @@ maximum token length. Defaults to `END`. Valid values are: the input is discarded. * `END`: when the input exceeds the maximum input token length the end of the input is discarded. 
+ +`user`::: +(optional, string) +For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. +===== ++ +.`task_settings` for the `completion` task type +[%collapsible%closed] +===== +`user`::: +(optional, string) +For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. ===== @@ -402,3 +415,20 @@ PUT _inference/text_embedding/openai_embeddings } ------------------------------------------------------------ // TEST[skip:TBD] + +The next example shows how to create an {infer} endpoint called +`openai_completion` to perform a `completion` task type. + +[source,console] +------------------------------------------------------------ +PUT _inference/completion/openai_completion +{ + "service": "openai", + "service_settings": { + "api_key": "", + "model_id": "gpt-3.5-turbo" + } +} +------------------------------------------------------------ +// TEST[skip:TBD] + From 480ee1c7b574d4606ca4e4fd17ae7e2ffe43feb2 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 2 Apr 2024 14:38:59 +0200 Subject: [PATCH 69/69] Convert FetchFieldsContext to a record (#106991) Simple refactoring to save a few lines of code. --- .../search/fetch/subphase/FetchFieldsContext.java | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java index c7fde5f77f067..5f5db5e533648 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchFieldsContext.java @@ -12,14 +12,4 @@ /** * The context needed to retrieve fields. 
*/ -public class FetchFieldsContext { - private final List fields; - - public FetchFieldsContext(List fields) { - this.fields = fields; - } - - public List fields() { - return fields; - } -} +public record FetchFieldsContext(List fields) {}